From f427f85331aa3a0ec7641e81ec64eea75bc47df7 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 19 May 2021 19:35:49 +0100 Subject: [PATCH 01/53] First steps in parallel rules implementation. --- .../fileformats/_nc_load_rules/__init__.py | 1 + lib/iris/fileformats/_nc_load_rules/engine.py | 123 ++ .../fileformats/_nc_load_rules/helpers.py | 1303 +++++++++++++++++ lib/iris/fileformats/_nc_load_rules/rules.py | 304 ++++ lib/iris/fileformats/netcdf.py | 30 +- lib/iris/tests/test_netcdf.py | 17 + 6 files changed, 1776 insertions(+), 2 deletions(-) create mode 100644 lib/iris/fileformats/_nc_load_rules/__init__.py create mode 100644 lib/iris/fileformats/_nc_load_rules/engine.py create mode 100644 lib/iris/fileformats/_nc_load_rules/helpers.py create mode 100644 lib/iris/fileformats/_nc_load_rules/rules.py diff --git a/lib/iris/fileformats/_nc_load_rules/__init__.py b/lib/iris/fileformats/_nc_load_rules/__init__.py new file mode 100644 index 0000000000..cfbff5bc7c --- /dev/null +++ b/lib/iris/fileformats/_nc_load_rules/__init__.py @@ -0,0 +1 @@ +# Support for replacing Pyke rules. diff --git a/lib/iris/fileformats/_nc_load_rules/engine.py b/lib/iris/fileformats/_nc_load_rules/engine.py new file mode 100644 index 0000000000..780858df81 --- /dev/null +++ b/lib/iris/fileformats/_nc_load_rules/engine.py @@ -0,0 +1,123 @@ +""" +A simple mimic of the Pyke 'knwoledge_engine', for interfacing to the routines +in 'iris.fileformats.netcdf' with minimal changes to that code. + +The core of this is the 'Engine' class, which mimics the Pyke engine operations, +as used by our code to translate each data cube. + +engine.get_kb() also returns a FactEntity object, which mimics *just enough* +API of a Pyke.knowlege_base, so that we can list its case-specific facts, as +used in :meth:`iris.fileformats.netcdf.pyke_stats`. + +""" +from .rules import run_rules + + +class FactList: + def __init__(self): + self.case_specific_facts = [] + + +class FactEntity: + # To support: + """ + kb_facts = engine.get_kb(_PYKE_FACT_BASE) + + for key in kb_facts.entity_lists.keys(): + for arg in kb_facts.entity_lists[key].case_specific_facts: + print("\t%s%s" % (key, arg)) + + """ + + def __init__(self): + self.entity_lists = {} + + def add_fact(self, fact_name, args): + if fact_name not in self.entity_lists: + self.entity_lists[fact_name] = FactList() + fact_list = self.entity_lists[fact_name] + fact_list.case_specific_facts.append(tuple(args)) + + def sect_facts(self, entity_name): + if entity_name in self.entity_lists: + facts = self.entity_lists.get(entity_name).case_specific_facts + else: + facts = [] + return facts + + +class Engine: + """ + A minimal mimic of a Pyke.engine. + + Provides just enough API so that the existing code in + :mod:`iris.fileformats.netcdf` can interface with our new rules functions. + + """ + + def __init__(self): + """Init new engine.""" + self.reset() + + def reset(self): + """Reset the engine = remove all facts.""" + self.facts = FactEntity() + + def activate(self, rules_base_str=None): + """ + Run all the translation rules to produce a single output cube. + + This implicitly references the output variable for this operation, + set by engine.cf_var (the variable name). + + The rules operation itself is coded elsewhere, + in :mod:`iris.fileformats.netcdf._nc_load_rules.rules`. 
+ + """ + run_rules(self) + + def print_stats(self): + """No-op, called by :meth:`iris.fileformats.netcdf.pyke_stats`.""" + pass + + def add_case_specific_fact(self, kb_name, fact_name, fact_arglist): + """ + Record a fact about the current output operation. + + Roughly, self.facts.entity_lists[fact_name].append(fact_arglist). + + """ + self.facts.add_fact(fact_name, fact_arglist) + + def get_kb(self, fact_base_str=None): + """ + Get a FactEntity, which mimic (bits of) a knowledge-base. + + Just allowing + :meth:`iris.fileformats.netcdf.pyke_stats` to list the facts. + + """ + return self.facts + + def fact_list(self, fact_name): + """ + Return the facts (arg-lists) for one fact name. + + A shorthand form used only by the new rules routines. + + AKA 'case-specific-facts', in the original. + Roughly "return self.facts.entity_lists[fact_name]". + + """ + return self.facts.sect_facts(fact_name) + + def add_fact(self, fact_name, fact_arglist): + """ + Add a new fact. + + A shorthand form used only by the new rules routines. + + """ + self.add_case_specific_fact( + kb_name="", fact_name=fact_name, fact_arglist=fact_arglist + ) diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py new file mode 100644 index 0000000000..9a908a95a1 --- /dev/null +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -0,0 +1,1303 @@ +""" +All the pure-Python 'helper' functions which previously included in the Pyke +rules database. + +Initially these haven't changed. +The new rules approach is still calling most of them. + +""" + +import warnings + +import cf_units +import numpy as np +import numpy.ma as ma + +import iris.aux_factory +from iris.common.mixin import _get_valid_standard_name +import iris.coords +import iris.coord_systems +import iris.fileformats.cf as cf +import iris.fileformats.netcdf +from iris.fileformats.netcdf import ( + _get_cf_var_data, + parse_cell_methods, + UnknownCellMethodWarning, +) +import iris.exceptions +import iris.std_names +import iris.util + + +# +# UD Units Constants (based on Unidata udunits.dat definition file) +# +UD_UNITS_LAT = [ + "degrees_north", + "degree_north", + "degree_n", + "degrees_n", + "degreen", + "degreesn", + "degrees", + "degrees north", + "degree north", + "degree n", + "degrees n", +] +UD_UNITS_LON = [ + "degrees_east", + "degree_east", + "degree_e", + "degrees_e", + "degreee", + "degreese", + "degrees", + "degrees east", + "degree east", + "degree e", + "degrees e", +] +UNKNOWN_UNIT_STRING = "?" 
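# ---------------------------------------------------------------------------
# Editorial sketch (not part of the patch): how the Engine / FactEntity mimic
# added in engine.py above is expected to be driven. Only the fact-store API
# is exercised; activate() is not called here, since it needs a real CF
# variable attached to the engine. Assumes the package path introduced by
# this patch.
from iris.fileformats._nc_load_rules.engine import Engine

example_engine = Engine()
example_engine.add_fact("coordinate", ("time",))   # shorthand fact recording
print(example_engine.fact_list("coordinate"))      # -> [('time',)]
kb = example_engine.get_kb()                       # FactEntity, as used by pyke_stats
print(kb.sect_facts("coordinate"))                 # -> [('time',)]
# ---------------------------------------------------------------------------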
+NO_UNIT_STRING = "-" + +# +# CF Dimensionless Vertical Coordinates +# +CF_COORD_VERTICAL = { + "atmosphere_ln_pressure_coordinate": ["p0", "lev"], + "atmosphere_sigma_coordinate": ["sigma", "ps", "ptop"], + "atmosphere_hybrid_sigma_pressure_coordinate": ["a", "b", "ps", "p0"], + "atmosphere_hybrid_height_coordinate": ["a", "b", "orog"], + "atmosphere_sleve_coordinate": [ + "a", + "b1", + "b2", + "ztop", + "zsurf1", + "zsurf2", + ], + "ocean_sigma_coordinate": ["sigma", "eta", "depth"], + "ocean_s_coordinate": ["s", "eta", "depth", "a", "b", "depth_c"], + "ocean_sigma_z_coordinate": [ + "sigma", + "eta", + "depth", + "depth_c", + "nsigma", + "zlev", + ], + "ocean_double_sigma_coordinate": [ + "sigma", + "depth", + "z1", + "z2", + "a", + "href", + "k_c", + ], + "ocean_s_coordinate_g1": ["s", "eta", "depth", "depth_c", "C"], + "ocean_s_coordinate_g2": ["s", "eta", "depth", "depth_c", "C"], +} + +# +# CF Grid Mappings +# +CF_GRID_MAPPING_ALBERS = "albers_conical_equal_area" +CF_GRID_MAPPING_AZIMUTHAL = "azimuthal_equidistant" +CF_GRID_MAPPING_LAMBERT_AZIMUTHAL = "lambert_azimuthal_equal_area" +CF_GRID_MAPPING_LAMBERT_CONFORMAL = "lambert_conformal_conic" +CF_GRID_MAPPING_LAMBERT_CYLINDRICAL = "lambert_cylindrical_equal_area" +CF_GRID_MAPPING_LAT_LON = "latitude_longitude" +CF_GRID_MAPPING_MERCATOR = "mercator" +CF_GRID_MAPPING_ORTHO = "orthographic" +CF_GRID_MAPPING_POLAR = "polar_stereographic" +CF_GRID_MAPPING_ROTATED_LAT_LON = "rotated_latitude_longitude" +CF_GRID_MAPPING_STEREO = "stereographic" +CF_GRID_MAPPING_TRANSVERSE = "transverse_mercator" +CF_GRID_MAPPING_VERTICAL = "vertical_perspective" +CF_GRID_MAPPING_GEOSTATIONARY = "geostationary" + +# +# CF Attribute Names. +# +CF_ATTR_AXIS = "axis" +CF_ATTR_BOUNDS = "bounds" +CF_ATTR_CALENDAR = "calendar" +CF_ATTR_CLIMATOLOGY = "climatology" +CF_ATTR_GRID_INVERSE_FLATTENING = "inverse_flattening" +CF_ATTR_GRID_EARTH_RADIUS = "earth_radius" +CF_ATTR_GRID_MAPPING_NAME = "grid_mapping_name" +CF_ATTR_GRID_NORTH_POLE_LAT = "grid_north_pole_latitude" +CF_ATTR_GRID_NORTH_POLE_LON = "grid_north_pole_longitude" +CF_ATTR_GRID_NORTH_POLE_GRID_LON = "north_pole_grid_longitude" +CF_ATTR_GRID_SEMI_MAJOR_AXIS = "semi_major_axis" +CF_ATTR_GRID_SEMI_MINOR_AXIS = "semi_minor_axis" +CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN = "latitude_of_projection_origin" +CF_ATTR_GRID_LON_OF_PROJ_ORIGIN = "longitude_of_projection_origin" +CF_ATTR_GRID_STANDARD_PARALLEL = "standard_parallel" +CF_ATTR_GRID_FALSE_EASTING = "false_easting" +CF_ATTR_GRID_FALSE_NORTHING = "false_northing" +CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN = "scale_factor_at_projection_origin" +CF_ATTR_GRID_SCALE_FACTOR_AT_CENT_MERIDIAN = "scale_factor_at_central_meridian" +CF_ATTR_GRID_LON_OF_CENT_MERIDIAN = "longitude_of_central_meridian" +CF_ATTR_GRID_STANDARD_PARALLEL = "standard_parallel" +CF_ATTR_GRID_PERSPECTIVE_HEIGHT = "perspective_point_height" +CF_ATTR_GRID_SWEEP_ANGLE_AXIS = "sweep_angle_axis" +CF_ATTR_POSITIVE = "positive" +CF_ATTR_STD_NAME = "standard_name" +CF_ATTR_LONG_NAME = "long_name" +CF_ATTR_UNITS = "units" +CF_ATTR_CELL_METHODS = "cell_methods" + +# +# CF Attribute Value Constants. +# +# Attribute - axis. +CF_VALUE_AXIS_X = "x" +CF_VALUE_AXIS_Y = "y" +CF_VALUE_AXIS_T = "t" +CF_VALUE_AXIS_Z = "z" + + +# Attribute - positive. +CF_VALUE_POSITIVE = ["down", "up"] + +# Attribute - standard_name. 
+CF_VALUE_STD_NAME_LAT = "latitude" +CF_VALUE_STD_NAME_LON = "longitude" +CF_VALUE_STD_NAME_GRID_LAT = "grid_latitude" +CF_VALUE_STD_NAME_GRID_LON = "grid_longitude" +CF_VALUE_STD_NAME_PROJ_X = "projection_x_coordinate" +CF_VALUE_STD_NAME_PROJ_Y = "projection_y_coordinate" + + +################################################################################ +def build_cube_metadata(engine): + """Add the standard meta data to the cube.""" + + cf_var = engine.cf_var + cube = engine.cube + + # Determine the cube's name attributes + cube.var_name = cf_var.cf_name + standard_name = getattr(cf_var, CF_ATTR_STD_NAME, None) + long_name = getattr(cf_var, CF_ATTR_LONG_NAME, None) + cube.long_name = long_name + + if standard_name is not None: + try: + cube.standard_name = _get_valid_standard_name(standard_name) + except ValueError: + if cube.long_name is not None: + cube.attributes["invalid_standard_name"] = standard_name + else: + cube.long_name = standard_name + + # Determine the cube units. + attr_units = get_attr_units(cf_var, cube.attributes) + cube.units = attr_units + + # Incorporate cell methods + nc_att_cell_methods = getattr(cf_var, CF_ATTR_CELL_METHODS, None) + with warnings.catch_warnings(record=True) as warning_records: + cube.cell_methods = parse_cell_methods(nc_att_cell_methods) + # Filter to get the warning we are interested in. + warning_records = [ + record + for record in warning_records + if issubclass(record.category, UnknownCellMethodWarning) + ] + if len(warning_records) > 0: + # Output an enhanced warning message. + warn_record = warning_records[0] + name = "{}".format(cf_var.cf_name) + msg = warn_record.message.args[0] + msg = msg.replace("variable", "variable {!r}".format(name)) + warnings.warn(message=msg, category=UnknownCellMethodWarning) + + # Set the cube global attributes. + for attr_name, attr_value in cf_var.cf_group.global_attributes.items(): + try: + cube.attributes[str(attr_name)] = attr_value + except ValueError as e: + msg = "Skipping global attribute {!r}: {}" + warnings.warn(msg.format(attr_name, str(e))) + + +################################################################################ +def _get_ellipsoid(cf_grid_var): + """Return the ellipsoid definition.""" + major = getattr(cf_grid_var, CF_ATTR_GRID_SEMI_MAJOR_AXIS, None) + minor = getattr(cf_grid_var, CF_ATTR_GRID_SEMI_MINOR_AXIS, None) + inverse_flattening = getattr( + cf_grid_var, CF_ATTR_GRID_INVERSE_FLATTENING, None + ) + + # Avoid over-specification exception. + if major is not None and minor is not None: + inverse_flattening = None + + # Check for a default spherical earth. 
+ if major is None and minor is None and inverse_flattening is None: + major = getattr(cf_grid_var, CF_ATTR_GRID_EARTH_RADIUS, None) + + return major, minor, inverse_flattening + + +################################################################################ +def build_coordinate_system(cf_grid_var): + """Create a coordinate system from the CF-netCDF grid mapping variable.""" + major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + + return iris.coord_systems.GeogCS(major, minor, inverse_flattening) + + +################################################################################ +def build_rotated_coordinate_system(engine, cf_grid_var): + """Create a rotated coordinate system from the CF-netCDF grid mapping variable.""" + major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + + north_pole_latitude = getattr( + cf_grid_var, CF_ATTR_GRID_NORTH_POLE_LAT, 90.0 + ) + north_pole_longitude = getattr( + cf_grid_var, CF_ATTR_GRID_NORTH_POLE_LON, 0.0 + ) + if north_pole_latitude is None or north_pole_longitude is None: + warnings.warn("Rotated pole position is not fully specified") + + north_pole_grid_lon = getattr( + cf_grid_var, CF_ATTR_GRID_NORTH_POLE_GRID_LON, 0.0 + ) + + ellipsoid = None + if ( + major is not None + or minor is not None + or inverse_flattening is not None + ): + ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) + + rcs = iris.coord_systems.RotatedGeogCS( + north_pole_latitude, + north_pole_longitude, + north_pole_grid_lon, + ellipsoid, + ) + + return rcs + + +################################################################################ +def build_transverse_mercator_coordinate_system(engine, cf_grid_var): + """ + Create a transverse Mercator coordinate system from the CF-netCDF + grid mapping variable. + + """ + major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + + latitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None + ) + longitude_of_central_meridian = getattr( + cf_grid_var, CF_ATTR_GRID_LON_OF_CENT_MERIDIAN, None + ) + false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) + false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) + scale_factor_at_central_meridian = getattr( + cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_CENT_MERIDIAN, None + ) + + # The following accounts for the inconsistancy in the transverse + # mercator description within the CF spec. + if longitude_of_central_meridian is None: + longitude_of_central_meridian = getattr( + cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None + ) + if scale_factor_at_central_meridian is None: + scale_factor_at_central_meridian = getattr( + cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None + ) + + ellipsoid = None + if ( + major is not None + or minor is not None + or inverse_flattening is not None + ): + ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) + + cs = iris.coord_systems.TransverseMercator( + latitude_of_projection_origin, + longitude_of_central_meridian, + false_easting, + false_northing, + scale_factor_at_central_meridian, + ellipsoid, + ) + + return cs + + +################################################################################ +def build_lambert_conformal_coordinate_system(engine, cf_grid_var): + """ + Create a Lambert conformal conic coordinate system from the CF-netCDF + grid mapping variable. 
+ + """ + major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + + latitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None + ) + longitude_of_central_meridian = getattr( + cf_grid_var, CF_ATTR_GRID_LON_OF_CENT_MERIDIAN, None + ) + false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) + false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) + standard_parallel = getattr( + cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None + ) + + ellipsoid = None + if ( + major is not None + or minor is not None + or inverse_flattening is not None + ): + ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) + + cs = iris.coord_systems.LambertConformal( + latitude_of_projection_origin, + longitude_of_central_meridian, + false_easting, + false_northing, + standard_parallel, + ellipsoid, + ) + + return cs + + +################################################################################ +def build_stereographic_coordinate_system(engine, cf_grid_var): + """ + Create a stereographic coordinate system from the CF-netCDF + grid mapping variable. + + """ + major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + + latitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None + ) + longitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None + ) + false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) + false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) + # Iris currently only supports Stereographic projections with a scale + # factor of 1.0. This is checked elsewhere. + + ellipsoid = None + if ( + major is not None + or minor is not None + or inverse_flattening is not None + ): + ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) + + cs = iris.coord_systems.Stereographic( + latitude_of_projection_origin, + longitude_of_projection_origin, + false_easting, + false_northing, + true_scale_lat=None, + ellipsoid=ellipsoid, + ) + + return cs + + +################################################################################ +def build_mercator_coordinate_system(engine, cf_grid_var): + """ + Create a Mercator coordinate system from the CF-netCDF + grid mapping variable. + + """ + major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + + longitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None + ) + # Iris currently only supports Mercator projections with specific + # values for false_easting, false_northing, + # scale_factor_at_projection_origin and standard_parallel. These are + # checked elsewhere. + + ellipsoid = None + if ( + major is not None + or minor is not None + or inverse_flattening is not None + ): + ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) + + cs = iris.coord_systems.Mercator( + longitude_of_projection_origin, ellipsoid=ellipsoid + ) + + return cs + + +################################################################################ +def build_lambert_azimuthal_equal_area_coordinate_system(engine, cf_grid_var): + """ + Create a lambert azimuthal equal area coordinate system from the CF-netCDF + grid mapping variable. 
+ + """ + major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + + latitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None + ) + longitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None + ) + false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) + false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) + + ellipsoid = None + if ( + major is not None + or minor is not None + or inverse_flattening is not None + ): + ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) + + cs = iris.coord_systems.LambertAzimuthalEqualArea( + latitude_of_projection_origin, + longitude_of_projection_origin, + false_easting, + false_northing, + ellipsoid, + ) + + return cs + + +################################################################################ +def build_albers_equal_area_coordinate_system(engine, cf_grid_var): + """ + Create a albers conical equal area coordinate system from the CF-netCDF + grid mapping variable. + + """ + major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + + latitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None + ) + longitude_of_central_meridian = getattr( + cf_grid_var, CF_ATTR_GRID_LON_OF_CENT_MERIDIAN, None + ) + false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) + false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) + standard_parallels = getattr( + cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None + ) + + ellipsoid = None + if ( + major is not None + or minor is not None + or inverse_flattening is not None + ): + ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) + + cs = iris.coord_systems.AlbersEqualArea( + latitude_of_projection_origin, + longitude_of_central_meridian, + false_easting, + false_northing, + standard_parallels, + ellipsoid, + ) + + return cs + + +################################################################################ +def build_vertical_perspective_coordinate_system(engine, cf_grid_var): + """ + Create a vertical perspective coordinate system from the CF-netCDF + grid mapping variable. + + """ + major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + + latitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None + ) + longitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None + ) + perspective_point_height = getattr( + cf_grid_var, CF_ATTR_GRID_PERSPECTIVE_HEIGHT, None + ) + false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) + false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) + + ellipsoid = None + if ( + major is not None + or minor is not None + or inverse_flattening is not None + ): + ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) + + cs = iris.coord_systems.VerticalPerspective( + latitude_of_projection_origin, + longitude_of_projection_origin, + perspective_point_height, + false_easting, + false_northing, + ellipsoid, + ) + + return cs + + +################################################################################ +def build_geostationary_coordinate_system(engine, cf_grid_var): + """ + Create a geostationary coordinate system from the CF-netCDF + grid mapping variable. 
+ + """ + major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + + latitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None + ) + longitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None + ) + perspective_point_height = getattr( + cf_grid_var, CF_ATTR_GRID_PERSPECTIVE_HEIGHT, None + ) + false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) + false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) + sweep_angle_axis = getattr( + cf_grid_var, CF_ATTR_GRID_SWEEP_ANGLE_AXIS, None + ) + + ellipsoid = None + if ( + major is not None + or minor is not None + or inverse_flattening is not None + ): + ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) + + cs = iris.coord_systems.Geostationary( + latitude_of_projection_origin, + longitude_of_projection_origin, + perspective_point_height, + sweep_angle_axis, + false_easting, + false_northing, + ellipsoid, + ) + + return cs + + +################################################################################ +def get_attr_units(cf_var, attributes): + attr_units = getattr(cf_var, CF_ATTR_UNITS, UNKNOWN_UNIT_STRING) + if not attr_units: + attr_units = UNKNOWN_UNIT_STRING + + # Sanitise lat/lon units. + if attr_units in UD_UNITS_LAT or attr_units in UD_UNITS_LON: + attr_units = "degrees" + + # Graceful loading of invalid units. + try: + cf_units.as_unit(attr_units) + except ValueError: + # Using converted unicode message. Can be reverted with Python 3. + msg = "Ignoring netCDF variable {!r} invalid units {!r}".format( + cf_var.cf_name, attr_units + ) + warnings.warn(msg) + attributes["invalid_units"] = attr_units + attr_units = UNKNOWN_UNIT_STRING + + if np.issubdtype(cf_var.dtype, np.str_): + attr_units = NO_UNIT_STRING + + if any( + hasattr(cf_var.cf_data, name) + for name in ("flag_values", "flag_masks", "flag_meanings") + ): + attr_units = cf_units._NO_UNIT_STRING + + # Get any assoicated calendar for a time reference coordinate. + if cf_units.as_unit(attr_units).is_time_reference(): + attr_calendar = getattr(cf_var, CF_ATTR_CALENDAR, None) + + if attr_calendar: + attr_units = cf_units.Unit(attr_units, calendar=attr_calendar) + + return attr_units + + +################################################################################ +def get_names(cf_coord_var, coord_name, attributes): + """Determine the standard_name, long_name and var_name attributes.""" + + standard_name = getattr(cf_coord_var, CF_ATTR_STD_NAME, None) + long_name = getattr(cf_coord_var, CF_ATTR_LONG_NAME, None) + cf_name = str(cf_coord_var.cf_name) + + if standard_name is not None: + try: + standard_name = _get_valid_standard_name(standard_name) + except ValueError: + if long_name is not None: + attributes["invalid_standard_name"] = standard_name + if coord_name is not None: + standard_name = coord_name + else: + standard_name = None + else: + if coord_name is not None: + attributes["invalid_standard_name"] = standard_name + standard_name = coord_name + else: + standard_name = None + + else: + if coord_name is not None: + standard_name = coord_name + + # Last attempt to set the standard name to something meaningful. 
+ if standard_name is None: + if cf_name in iris.std_names.STD_NAMES: + standard_name = cf_name + + return (standard_name, long_name, cf_name) + + +################################################################################ +def get_cf_bounds_var(cf_coord_var): + """ + Return the CF variable representing the bounds of a coordinate + variable. + + """ + attr_bounds = getattr(cf_coord_var, CF_ATTR_BOUNDS, None) + attr_climatology = getattr(cf_coord_var, CF_ATTR_CLIMATOLOGY, None) + + # Determine bounds, prefering standard bounds over climatology. + # NB. No need to raise a warning if the bounds/climatology + # variable is missing, as that will already have been done by + # iris.fileformats.cf. + cf_bounds_var = None + climatological = False + if attr_bounds is not None: + bounds_vars = cf_coord_var.cf_group.bounds + if attr_bounds in bounds_vars: + cf_bounds_var = bounds_vars[attr_bounds] + elif attr_climatology is not None: + climatology_vars = cf_coord_var.cf_group.climatology + if attr_climatology in climatology_vars: + cf_bounds_var = climatology_vars[attr_climatology] + climatological = True + + if attr_bounds is not None and attr_climatology is not None: + warnings.warn( + "Ignoring climatology in favour of bounds attribute " + "on NetCDF variable {!r}.".format(cf_coord_var.cf_name) + ) + + return cf_bounds_var, climatological + + +################################################################################ +def reorder_bounds_data(bounds_data, cf_bounds_var, cf_coord_var): + """ + Return a bounds_data array with the vertex dimension as the most + rapidly varying. + + .. note:: + + This function assumes the dimension names of the coordinate + variable match those of the bounds variable in order to determine + which is the vertex dimension. + + + """ + vertex_dim_names = set(cf_bounds_var.dimensions).difference( + cf_coord_var.dimensions + ) + if len(vertex_dim_names) != 1: + msg = ( + "Too many dimension names differ between coordinate " + "variable {!r} and the bounds variable {!r}. " + "Expected 1, got {}." + ) + raise ValueError( + msg.format( + str(cf_coord_var.cf_name), + str(cf_bounds_var.cf_name), + len(vertex_dim_names), + ) + ) + vertex_dim = cf_bounds_var.dimensions.index(*vertex_dim_names) + bounds_data = np.rollaxis( + bounds_data.view(), vertex_dim, len(bounds_data.shape) + ) + return bounds_data + + +################################################################################ +def build_dimension_coordinate( + engine, cf_coord_var, coord_name=None, coord_system=None +): + """Create a dimension coordinate (DimCoord) and add it to the cube.""" + + cf_var = engine.cf_var + cube = engine.cube + attributes = {} + + attr_units = get_attr_units(cf_coord_var, attributes) + points_data = cf_coord_var[:] + # Gracefully fill points masked array. + if ma.is_masked(points_data): + points_data = ma.filled(points_data) + msg = "Gracefully filling {!r} dimension coordinate masked points" + warnings.warn(msg.format(str(cf_coord_var.cf_name))) + + # Get any coordinate bounds. + cf_bounds_var, climatological = get_cf_bounds_var(cf_coord_var) + if cf_bounds_var is not None: + bounds_data = cf_bounds_var[:] + # Gracefully fill bounds masked array. + if ma.is_masked(bounds_data): + bounds_data = ma.filled(bounds_data) + msg = "Gracefully filling {!r} dimension coordinate masked bounds" + warnings.warn(msg.format(str(cf_coord_var.cf_name))) + # Handle transposed bounds where the vertex dimension is not + # the last one. Test based on shape to support different + # dimension names. 
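# ---------------------------------------------------------------------------
# Editorial illustration (not part of the patch): the reordering performed by
# reorder_bounds_data(), shown with plain numpy shapes. Assume a coordinate of
# shape (3,) whose bounds variable is stored as (nv, time), i.e. shape (2, 3),
# with the vertex dimension first.
import numpy as np

example_bounds = np.arange(6).reshape(2, 3)
reordered = np.rollaxis(example_bounds.view(), 0, example_bounds.ndim)
print(reordered.shape)   # -> (3, 2): the vertex dimension now varies fastest
# ---------------------------------------------------------------------------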
+ if cf_bounds_var.shape[:-1] != cf_coord_var.shape: + bounds_data = reorder_bounds_data( + bounds_data, cf_bounds_var, cf_coord_var + ) + else: + bounds_data = None + + # Determine whether the coordinate is circular. + circular = False + if ( + points_data.ndim == 1 + and coord_name in [CF_VALUE_STD_NAME_LON, CF_VALUE_STD_NAME_GRID_LON] + and cf_units.Unit(attr_units) + in [cf_units.Unit("radians"), cf_units.Unit("degrees")] + ): + modulus_value = cf_units.Unit(attr_units).modulus + circular = iris.util._is_circular( + points_data, modulus_value, bounds=bounds_data + ) + + # Determine the name of the dimension/s shared between the CF-netCDF data variable + # and the coordinate being built. + common_dims = [ + dim for dim in cf_coord_var.dimensions if dim in cf_var.dimensions + ] + data_dims = None + if common_dims: + # Calculate the offset of each common dimension. + data_dims = [cf_var.dimensions.index(dim) for dim in common_dims] + + # Determine the standard_name, long_name and var_name + standard_name, long_name, var_name = get_names( + cf_coord_var, coord_name, attributes + ) + + # Create the coordinate. + try: + coord = iris.coords.DimCoord( + points_data, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + units=attr_units, + bounds=bounds_data, + attributes=attributes, + coord_system=coord_system, + circular=circular, + climatological=climatological, + ) + except ValueError as e_msg: + # Attempt graceful loading. + coord = iris.coords.AuxCoord( + points_data, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + units=attr_units, + bounds=bounds_data, + attributes=attributes, + coord_system=coord_system, + climatological=climatological, + ) + cube.add_aux_coord(coord, data_dims) + msg = ( + "Failed to create {name!r} dimension coordinate: {error}\n" + "Gracefully creating {name!r} auxiliary coordinate instead." + ) + warnings.warn(msg.format(name=str(cf_coord_var.cf_name), error=e_msg)) + else: + # Add the dimension coordinate to the cube. + if data_dims: + cube.add_dim_coord(coord, data_dims) + else: + # Scalar coords are placed in the aux_coords container. + cube.add_aux_coord(coord, data_dims) + + # Update the coordinate to CF-netCDF variable mapping. + engine.cube_parts["coordinates"].append((coord, cf_coord_var.cf_name)) + + +################################################################################ +def build_auxiliary_coordinate( + engine, cf_coord_var, coord_name=None, coord_system=None +): + """Create an auxiliary coordinate (AuxCoord) and add it to the cube.""" + + cf_var = engine.cf_var + cube = engine.cube + attributes = {} + + # Get units + attr_units = get_attr_units(cf_coord_var, attributes) + + # Get any coordinate point data. + if isinstance(cf_coord_var, cf.CFLabelVariable): + points_data = cf_coord_var.cf_label_data(cf_var) + else: + points_data = _get_cf_var_data(cf_coord_var, engine.filename) + + # Get any coordinate bounds. + cf_bounds_var, climatological = get_cf_bounds_var(cf_coord_var) + if cf_bounds_var is not None: + bounds_data = _get_cf_var_data(cf_bounds_var, engine.filename) + + # Handle transposed bounds where the vertex dimension is not + # the last one. Test based on shape to support different + # dimension names. + if cf_bounds_var.shape[:-1] != cf_coord_var.shape: + # Resolving the data to a numpy array (i.e. *not* masked) for + # compatibility with array creators (i.e. 
dask) + bounds_data = np.asarray(bounds_data) + bounds_data = reorder_bounds_data( + bounds_data, cf_bounds_var, cf_coord_var + ) + else: + bounds_data = None + + # Determine the name of the dimension/s shared between the CF-netCDF data variable + # and the coordinate being built. + common_dims = [ + dim for dim in cf_coord_var.dimensions if dim in cf_var.dimensions + ] + data_dims = None + if common_dims: + # Calculate the offset of each common dimension. + data_dims = [cf_var.dimensions.index(dim) for dim in common_dims] + + # Determine the standard_name, long_name and var_name + standard_name, long_name, var_name = get_names( + cf_coord_var, coord_name, attributes + ) + + # Create the coordinate + coord = iris.coords.AuxCoord( + points_data, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + units=attr_units, + bounds=bounds_data, + attributes=attributes, + coord_system=coord_system, + climatological=climatological, + ) + + # Add it to the cube + cube.add_aux_coord(coord, data_dims) + + # Make a list with names, stored on the engine, so we can find them all later. + engine.cube_parts["coordinates"].append((coord, cf_coord_var.cf_name)) + + +################################################################################ +def build_cell_measures(engine, cf_cm_var): + """Create a CellMeasure instance and add it to the cube.""" + cf_var = engine.cf_var + cube = engine.cube + attributes = {} + + # Get units + attr_units = get_attr_units(cf_cm_var, attributes) + + # Get (lazy) content array + data = _get_cf_var_data(cf_cm_var, engine.filename) + + # Determine the name of the dimension/s shared between the CF-netCDF data variable + # and the coordinate being built. + common_dims = [ + dim for dim in cf_cm_var.dimensions if dim in cf_var.dimensions + ] + data_dims = None + if common_dims: + # Calculate the offset of each common dimension. + data_dims = [cf_var.dimensions.index(dim) for dim in common_dims] + + # Determine the standard_name, long_name and var_name + standard_name, long_name, var_name = get_names(cf_cm_var, None, attributes) + + # Obtain the cf_measure. + measure = cf_cm_var.cf_measure + + # Create the CellMeasure + cell_measure = iris.coords.CellMeasure( + data, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + units=attr_units, + attributes=attributes, + measure=measure, + ) + + # Add it to the cube + cube.add_cell_measure(cell_measure, data_dims) + + # Make a list with names, stored on the engine, so we can find them all later. + engine.cube_parts["cell_measures"].append( + (cell_measure, cf_cm_var.cf_name) + ) + + +################################################################################ +def build_ancil_var(engine, cf_av_var): + """Create an AncillaryVariable instance and add it to the cube.""" + cf_var = engine.cf_var + cube = engine.cube + attributes = {} + + # Get units + attr_units = get_attr_units(cf_av_var, attributes) + + # Get (lazy) content array + data = _get_cf_var_data(cf_av_var, engine.filename) + + # Determine the name of the dimension/s shared between the CF-netCDF data variable + # and the AV being built. + common_dims = [ + dim for dim in cf_av_var.dimensions if dim in cf_var.dimensions + ] + data_dims = None + if common_dims: + # Calculate the offset of each common dimension. 
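# ---------------------------------------------------------------------------
# Editorial illustration (not part of the patch): the dimension-mapping step
# used here and in the coordinate / cell-measure builders above. The
# dimension names are purely illustrative.
example_var_dims = ("time", "lat", "lon")    # data variable dimensions
example_common_dims = ["lat", "lon"]         # dimensions shared with the variable being built
example_data_dims = [example_var_dims.index(dim) for dim in example_common_dims]
print(example_data_dims)   # -> [1, 2]
# ---------------------------------------------------------------------------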
+ data_dims = [cf_var.dimensions.index(dim) for dim in common_dims] + + # Determine the standard_name, long_name and var_name + standard_name, long_name, var_name = get_names(cf_av_var, None, attributes) + + # Create the AncillaryVariable + av = iris.coords.AncillaryVariable( + data, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + units=attr_units, + attributes=attributes, + ) + + # Add it to the cube + cube.add_ancillary_variable(av, data_dims) + + # Make a list with names, stored on the engine, so we can find them all later. + engine.cube_parts["ancillary_variables"].append((av, cf_av_var.cf_name)) + + +################################################################################ +def _is_lat_lon( + cf_var, ud_units, std_name, std_name_grid, axis_name, prefixes +): + """ + Determine whether the CF coordinate variable is a latitude/longitude variable. + + Ref: [CF] Section 4.1 Latitude Coordinate. + [CF] Section 4.2 Longitude Coordinate. + + """ + is_valid = False + attr_units = getattr(cf_var, CF_ATTR_UNITS, None) + + if attr_units is not None: + attr_units = attr_units.lower() + is_valid = attr_units in ud_units + + # Special case - Check for rotated pole. + if attr_units == "degrees": + attr_std_name = getattr(cf_var, CF_ATTR_STD_NAME, None) + if attr_std_name is not None: + is_valid = attr_std_name.lower() == std_name_grid + else: + is_valid = False + # TODO: check that this interpretation of axis is correct. + attr_axis = getattr(cf_var, CF_ATTR_AXIS, None) + if attr_axis is not None: + is_valid = attr_axis.lower() == axis_name + else: + # Alternative is to check standard_name or axis. + attr_std_name = getattr(cf_var, CF_ATTR_STD_NAME, None) + + if attr_std_name is not None: + attr_std_name = attr_std_name.lower() + is_valid = attr_std_name in [std_name, std_name_grid] + if not is_valid: + is_valid = any( + [attr_std_name.startswith(prefix) for prefix in prefixes] + ) + else: + attr_axis = getattr(cf_var, CF_ATTR_AXIS, None) + + if attr_axis is not None: + is_valid = attr_axis.lower() == axis_name + + return is_valid + + +################################################################################ +def is_latitude(engine, cf_name): + """Determine whether the CF coordinate variable is a latitude variable.""" + cf_var = engine.cf_var.cf_group[cf_name] + return _is_lat_lon( + cf_var, + UD_UNITS_LAT, + CF_VALUE_STD_NAME_LAT, + CF_VALUE_STD_NAME_GRID_LAT, + CF_VALUE_AXIS_Y, + ["lat", "rlat"], + ) + + +################################################################################ +def is_longitude(engine, cf_name): + """Determine whether the CF coordinate variable is a longitude variable.""" + cf_var = engine.cf_var.cf_group[cf_name] + return _is_lat_lon( + cf_var, + UD_UNITS_LON, + CF_VALUE_STD_NAME_LON, + CF_VALUE_STD_NAME_GRID_LON, + CF_VALUE_AXIS_X, + ["lon", "rlon"], + ) + + +################################################################################ +def is_projection_x_coordinate(engine, cf_name): + """ + Determine whether the CF coordinate variable is a + projection_x_coordinate variable. + + """ + cf_var = engine.cf_var.cf_group[cf_name] + attr_name = getattr(cf_var, CF_ATTR_STD_NAME, None) or getattr( + cf_var, CF_ATTR_LONG_NAME, None + ) + return attr_name == CF_VALUE_STD_NAME_PROJ_X + + +################################################################################ +def is_projection_y_coordinate(engine, cf_name): + """ + Determine whether the CF coordinate variable is a + projection_y_coordinate variable. 
+ + """ + cf_var = engine.cf_var.cf_group[cf_name] + attr_name = getattr(cf_var, CF_ATTR_STD_NAME, None) or getattr( + cf_var, CF_ATTR_LONG_NAME, None + ) + return attr_name == CF_VALUE_STD_NAME_PROJ_Y + + +################################################################################ +def is_time(engine, cf_name): + """ + Determine whether the CF coordinate variable is a time variable. + + Ref: [CF] Section 4.4 Time Coordinate. + + """ + cf_var = engine.cf_var.cf_group[cf_name] + attr_units = getattr(cf_var, CF_ATTR_UNITS, None) + + attr_std_name = getattr(cf_var, CF_ATTR_STD_NAME, None) + attr_axis = getattr(cf_var, CF_ATTR_AXIS, "") + try: + is_time_reference = cf_units.Unit(attr_units or 1).is_time_reference() + except ValueError: + is_time_reference = False + + return is_time_reference and ( + attr_std_name == "time" or attr_axis.lower() == CF_VALUE_AXIS_T + ) + + +################################################################################ +def is_time_period(engine, cf_name): + """Determine whether the CF coordinate variable represents a time period.""" + is_valid = False + cf_var = engine.cf_var.cf_group[cf_name] + attr_units = getattr(cf_var, CF_ATTR_UNITS, None) + + if attr_units is not None: + try: + is_valid = cf_units.is_time(attr_units) + except ValueError: + is_valid = False + + return is_valid + + +################################################################################ +def is_grid_mapping(engine, cf_name, grid_mapping): + """Determine whether the CF grid mapping variable is of the appropriate type.""" + + is_valid = False + cf_var = engine.cf_var.cf_group[cf_name] + attr_mapping_name = getattr(cf_var, CF_ATTR_GRID_MAPPING_NAME, None) + + if attr_mapping_name is not None: + is_valid = attr_mapping_name.lower() == grid_mapping + + return is_valid + + +################################################################################ +def _is_rotated(engine, cf_name, cf_attr_value): + """Determine whether the CF coordinate variable is rotated.""" + + is_valid = False + cf_var = engine.cf_var.cf_group[cf_name] + attr_std_name = getattr(cf_var, CF_ATTR_STD_NAME, None) + + if attr_std_name is not None: + is_valid = attr_std_name.lower() == cf_attr_value + else: + attr_units = getattr(cf_var, CF_ATTR_UNITS, None) + if attr_units is not None: + is_valid = attr_units.lower() == "degrees" + + return is_valid + + +################################################################################ +def is_rotated_latitude(engine, cf_name): + """Determine whether the CF coodinate variable is rotated latitude.""" + return _is_rotated(engine, cf_name, CF_VALUE_STD_NAME_GRID_LAT) + + +############################################################################### +def is_rotated_longitude(engine, cf_name): + """Determine whether the CF coordinate variable is rotated longitude.""" + return _is_rotated(engine, cf_name, CF_VALUE_STD_NAME_GRID_LON) + + +################################################################################ +def has_supported_mercator_parameters(engine, cf_name): + """Determine whether the CF grid mapping variable has the supported + values for the parameters of the Mercator projection.""" + + is_valid = True + cf_grid_var = engine.cf_var.cf_group[cf_name] + + false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) + false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) + scale_factor_at_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None + ) + standard_parallel = getattr( + 
cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None + ) + + if false_easting is not None and false_easting != 0: + warnings.warn( + "False eastings other than 0.0 not yet supported " + "for Mercator projections" + ) + is_valid = False + if false_northing is not None and false_northing != 0: + warnings.warn( + "False northings other than 0.0 not yet supported " + "for Mercator projections" + ) + is_valid = False + if ( + scale_factor_at_projection_origin is not None + and scale_factor_at_projection_origin != 1 + ): + warnings.warn( + "Scale factors other than 1.0 not yet supported for " + "Mercator projections" + ) + is_valid = False + if standard_parallel is not None and standard_parallel != 0: + warnings.warn( + "Standard parallels other than 0.0 not yet " + "supported for Mercator projections" + ) + is_valid = False + + return is_valid + + +################################################################################ +def has_supported_stereographic_parameters(engine, cf_name): + """Determine whether the CF grid mapping variable has a value of 1.0 + for the scale_factor_at_projection_origin attribute.""" + + is_valid = True + cf_grid_var = engine.cf_var.cf_group[cf_name] + + scale_factor_at_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None + ) + + if ( + scale_factor_at_projection_origin is not None + and scale_factor_at_projection_origin != 1 + ): + warnings.warn( + "Scale factors other than 1.0 not yet supported for " + "stereographic projections" + ) + is_valid = False + + return is_valid diff --git a/lib/iris/fileformats/_nc_load_rules/rules.py b/lib/iris/fileformats/_nc_load_rules/rules.py new file mode 100644 index 0000000000..b7a83e9fa4 --- /dev/null +++ b/lib/iris/fileformats/_nc_load_rules/rules.py @@ -0,0 +1,304 @@ +""" +Replacement code for the Pyke rules. + +For now, we are still emulating various aspects of how our original Pyke-based +code used the Pyke 'engine' to hold translation data, both Pyke-specific and +not : +1) basic details from the iris.fileformats.cf analysis of the file are + recorded before translating each output cube, using + "engine.assert_case_specific_fact(name, args)". + +2) this is also used to store intermediate info passed between rules, which + used to done with a "facts_cf.provides" statement in rule actions. + +3) Iris-specific info is stored in our own additional properties stored in + extra properties added to the engine object : + engine.cf_var, .cube, .cube_parts, .requires, .rule_triggered, .filename + +Our "rules" are just action routines. +The master 'run_rules' routine decides which to call based on the info recorded +when processing each cube output. It does this in a simple explicit way, which +doesn't use any clever chaining, "trigger conditions" or rules-like behaviour. + +FOR NOW: we are still using intermediate facts to carry information between +rules. + +""" + +from . import helpers as hh +from functools import wraps + + +def convert_rulesfuncname_to_rulename(func_name): + # Given the name of a rules-func, return the name of the rule. + funcname_prefix = "rule_" + rulename_prefix = "fc_" # To match existing behaviours + rule_name = func_name + if rule_name.startswith(funcname_prefix): + rule_name = rule_name[len(funcname_prefix) :] + if not rule_name.startswith(rulename_prefix): + rule_name = rulename_prefix + rule_name + return rule_name + + +def _default_rulenamesfunc(func_name): + # A default function to deduce the rules-name from a rule-func-name. 
+ # This (default) one assumes there are *no* additional call fact_arglist, + # i.e. the function does *not* take parameters to implement multiple rules. + rule_name = convert_rulesfuncname_to_rulename(func_name) + return rule_name + + +def rules_function(func): + # Wrap a rules function with some standard behaviour. + # Notably : engages with the rules logging process. + @wraps(func) + def inner(engine, *args, **kwargs): + # Call the original rules-func + rule_name = func(engine, *args, **kwargs) + if rule_name is None: + # Work out the corresponding rule name, and log it. + # Note: a rules returns a name string, which identifies it, + # but also may vary depending on whether it successfully + # triggered, and if so what it mathched. + rule_name = _default_rulenamesfunc(func.__name__) + engine.rule_triggered.add(rule_name) + + func._rulenames_func = _default_rulenamesfunc + return inner + + +@rules_function +def rule_default(engine): + hh.build_cube_metadata(engine) + + +_grid_types_to_checker_builder = { + hh.CF_GRID_MAPPING_LAT_LON: (None, hh.build_coordinate_system), + hh.CF_GRID_MAPPING_ROTATED_LAT_LON: ( + None, + hh.build_rotated_coordinate_system, + ), + hh.CF_GRID_MAPPING_MERCATOR: ( + hh.has_supported_mercator_parameters, + hh.build_mercator_coordinate_system, + ), + hh.CF_GRID_MAPPING_TRANSVERSE: ( + None, + hh.build_transverse_mercator_coordinate_system, + ), + hh.CF_GRID_MAPPING_STEREO: ( + hh.has_supported_stereographic_parameters, + hh.build_stereographic_coordinate_system, + ), + hh.CF_GRID_MAPPING_LAMBERT_CONFORMAL: ( + None, + hh.build_lambert_conformal_coordinate_system, + ), + hh.CF_GRID_MAPPING_LAMBERT_AZIMUTHAL: ( + None, + hh.build_lambert_azimuthal_equal_area_coordinate_system, + ), + hh.CF_GRID_MAPPING_ALBERS: ( + None, + hh.build_albers_equal_area_coordinate_system, + ), + hh.CF_GRID_MAPPING_VERTICAL: ( + None, + hh.build_vertical_perspective_coordinate_system, + ), + hh.CF_GRID_MAPPING_GEOSTATIONARY: ( + None, + hh.build_geostationary_coordinate_system, + ), +} + + +@rules_function +def rule_provides_grid_mapping(engine, gridmapping_fact): + (var_name,) = gridmapping_fact + rule_name = "fc_provides_grid_mapping" + cf_var = engine.cf_var.cf_group[var_name] + grid_mapping_type = getattr(cf_var, hh.CF_ATTR_GRID_MAPPING_NAME, None) + succeed = True + if grid_mapping_type is None: + succeed = False + rule_name += " --FAILED(no grid-mapping attr)" + else: + grid_mapping_type = grid_mapping_type.lower() + if succeed: + if grid_mapping_type in _grid_types_to_checker_builder: + checker, builder = _grid_types_to_checker_builder[ + grid_mapping_type + ] + rule_name += f"_({grid_mapping_type})" + else: + succeed = False + rule_name += f" --FAILED(unhandled type {grid_mapping_type})" + # We DON'T call this, as we already identified the type in the call. + # if succeed and not is_grid_mapping(engine, var_name, grid_mapping_type): + # succeed = False + # rule_name += f' --(FAILED is_grid_mapping)' + if succeed: + if checker is not None and not checker(engine, grid_mapping_type): + succeed = False + rule_name += f" --(FAILED check {checker.__name__})" + + if succeed: + coordinate_system = builder(engine, cf_var) + # Check there is not an existing one. 
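# ---------------------------------------------------------------------------
# Editorial sketch (not part of the patch): how the lookup table defined
# above resolves a CF grid_mapping_name into its (checker, builder) pair.
# Evaluated in this module's namespace (the module is renamed to actions.py
# in a later commit of this series).
checker_fn, builder_fn = _grid_types_to_checker_builder[
    "rotated_latitude_longitude"
]
# checker_fn is None (no extra validity check for rotated lat-lon);
# builder_fn is the helper hh.build_rotated_coordinate_system.
# ---------------------------------------------------------------------------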
+ old_gridtype_fact = engine.fact_list("grid-type") + if old_gridtype_fact: + (old_gridtype,) = old_gridtype_fact + succeed = False + rule_name += ( + f" --(FAILED overwrite coord-sytem " + f"{old_gridtype} with {grid_mapping_type})" + ) + if succeed: + engine.cube_parts["coordinate_system"] = coordinate_system + engine.add_fact("grid-type", (grid_mapping_type,)) + + return rule_name + + +@rules_function +def rule_provides_coordinate(engine, dimcoord_fact): + (var_name,) = dimcoord_fact + + # Identify the coord type + # N.B. *only* to "name" the rule, for debug : no functional need. + coord_type = None + if hh.is_latitude(engine, var_name): + coord_type = "latitude" + elif hh.is_longitude(engine, var_name): + coord_type = "longitude" + elif hh.is_rotated_latitude(engine, var_name): + coord_type = "rotated_latitude" + elif hh.is_rotated_longitude(engine, var_name): + coord_type = "rotated_longitude" + elif hh.is_time(engine, var_name): + coord_type = "time" + elif hh.is_time_period(engine, var_name): + coord_type = "time_period" + elif hh.is_projection_x_coordinate(engine, var_name): + coord_type = "projection_x" + elif hh.is_projection_y_coordinate(engine, var_name): + coord_type = "projection_y" + + if coord_type is None: + # Not identified as a specific known coord_type. + # N.B. in the original rules, this does *not* trigger separate + # 'provides' and 'build' phases : there is just a single + # 'fc_default_coordinate' rule. + # Rationalise this for now by making it like the others. + # FOR NOW: ~matching old code, but they could *all* be simplified. + # TODO: combine 2 operation into 1 for ALL of these. + coord_type = "miscellaneous" + rule_name = "fc_default_coordinate_(provide-phase)" + else: + rule_name = f"fc_provides_coordinate_({coord_type})" + + engine.add_fact("provides-coordinate-(oftype)", (coord_type, var_name)) + return rule_name + + +_coordtype_to_gridtype_coordname = { + "latitude": ("latitude_longitude", hh.CF_VALUE_STD_NAME_LAT), + "longitude": ("latitude_longitude", hh.CF_VALUE_STD_NAME_LON), + "rotated_latitude": ( + "rotated_latitude_longitude", + hh.CF_VALUE_STD_NAME_GRID_LAT, + ), + "rotated_longitude": ( + "rotated_latitude_longitude", + hh.CF_VALUE_STD_NAME_GRID_LON, + ), + "projection_x": ("projected", hh.CF_VALUE_STD_NAME_PROJ_X), + "projection_y": ("projected", hh.CF_VALUE_STD_NAME_PROJ_Y), + "time": (None, None), + "time_period": (None, None), + "miscellaneous": (None, None), +} + + +@rules_function +def rule_build_coordinate(engine, providescoord_fact): + coord_type, var_name = providescoord_fact + cf_var = engine.cf_var.cf_group[var_name] + rule_name = f"fc_build_coordinate_{coord_type}" + grid_type, coord_name = _coordtype_to_gridtype_coordname[coord_type] + succeed = True + coord_system = None + if grid_type is not None: + if coord_type not in ("latitude", "longitude"): + # There needs to be the right sort of coordinate system + coord_system = engine.cube_parts.get("coordinate_system") + if coord_system is None: + succeed = False + rule_name += " --FAILED(no coord-system)" + # TODO else: we ***asssume*** coord-system is the right type ?? + if succeed: + hh.build_dimension_coordinate( + engine, cf_var, coord_name=coord_name, coord_system=coord_system + ) + + return rule_name + + +@rules_function +def rule_build_auxiliary_coordinate(engine, auxcoord_fact): + (var_name,) = auxcoord_fact + rule_name = "fc_build_auxiliary_coordinate" + + # FOR NOW: attempt to identify type, though it only affects rule-name? 
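# ---------------------------------------------------------------------------
# Editorial sketch (not part of the patch): the fact hand-off between the
# "provides" and "build" phases driven by run_rules() below. The variable
# names are illustrative only.
from iris.fileformats._nc_load_rules.engine import Engine

_example_engine = Engine()
_example_engine.add_fact("provides-coordinate-(oftype)", ("latitude", "lat"))
for _fact in _example_engine.fact_list("provides-coordinate-(oftype)"):
    _coord_type, _var_name = _fact   # -> "latitude", "lat"
# ---------------------------------------------------------------------------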
+ coord_type = "" # unidentified : can be OK + if hh.is_time(engine, var_name): + coord_type = "time" + elif hh.is_time_period(engine, var_name): + coord_type = "time_period" + elif hh.is_longitude(engine, var_name): + coord_type = "longitude" + if hh.is_rotated_longitude(engine, var_name): + coord_type += "_rotated" + elif hh.is_latitude(engine, var_name): + coord_type = "latitude" + if hh.is_rotated_latitude(engine, var_name): + coord_type += "_rotated" + + if coord_type: + rule_name += f"_{coord_type}" + + cf_var = engine.cf_var.cf_group.auxiliary_coordinates[var_name] + hh.build_auxiliary_coordinate( + engine, cf_var, coord_name=hh.CF_VALUE_STD_NAME_GRID_LON + ) + + return rule_name + + +def run_rules(engine): + # default (all cubes) rule, always runs + rule_default(engine) # This should run the default rules. + + # deal with grid-mappings + grid_mapping_facts = engine.fact_list("grid_mapping") + for grid_mapping_fact in grid_mapping_facts: + rule_provides_grid_mapping(engine, grid_mapping_fact) + + # identify + record aka "PROVIDE" specific named coordinates + # N.B. cf.py id-d these as coords NOT aux-coords (stored separately) + # TODO: can probably remove this step ?? + dimcoord_facts = engine.fact_list("coordinate") + for dimcoord_fact in dimcoord_facts: + rule_provides_coordinate(engine, dimcoord_fact) + + # build coordinates + providescoord_facts = engine.fact_list("provides-coordinate-(oftype)") + for providescoord_fact in providescoord_facts: + rule_build_coordinate(engine, providescoord_fact) + + # build aux-coords + auxcoord_facts = engine.fact_list("auxiliary_coordinate") + for auxcoord_fact in auxcoord_facts: + rule_build_auxiliary_coordinate(engine, auxcoord_fact) diff --git a/lib/iris/fileformats/netcdf.py b/lib/iris/fileformats/netcdf.py index 8f40131e54..91099464b1 100644 --- a/lib/iris/fileformats/netcdf.py +++ b/lib/iris/fileformats/netcdf.py @@ -381,7 +381,7 @@ def coord(self, name): return result -def _pyke_kb_engine(): +def _pyke_kb_engine_real(): """Return the PyKE knowledge engine for CF->cube conversion.""" pyke_dir = os.path.join(os.path.dirname(__file__), "_pyke_rules") @@ -412,6 +412,21 @@ def _pyke_kb_engine(): return engine +LOAD_PYKE = True + + +def _pyke_kb_engine(): + """Return a knowledge engine, or replacement object.""" + if LOAD_PYKE: + engine = _pyke_kb_engine_real() + else: + # Deferred import to avoid circularity. + import iris.fileformats._nc_load_rules.engine as nonpyke_engine + + engine = nonpyke_engine.Engine() + return engine + + class NetCDFDataProxy: """A reference to the data payload of a single NetCDF file variable.""" @@ -581,6 +596,17 @@ def _get_cf_var_data(cf_var, filename): return as_lazy_data(proxy, chunks=chunks) +class OrderedAddableList(list): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._n_add = 0 + + def add(self, msg): + self._n_add += 1 + n_add = self._n_add + self.append(f"#{n_add:03d} : {msg}") + + def _load_cube(engine, cf, cf_var, filename): from iris.cube import Cube @@ -596,7 +622,7 @@ def _load_cube(engine, cf, cf_var, filename): engine.cube = cube engine.cube_parts = {} engine.requires = {} - engine.rule_triggered = set() + engine.rule_triggered = OrderedAddableList() # set() engine.filename = filename # Assert any case-specific facts. 
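A minimal usage sketch (editorial, not part of the patch) of the LOAD_PYKE switch added above, mirroring what the test changes below do. The file path is purely illustrative.

    import iris
    import iris.fileformats.netcdf as netcdf_io

    netcdf_io.LOAD_PYKE = True         # original Pyke knowledge engine
    cube_pyke = iris.load_cube("example_rotated.nc")

    netcdf_io.LOAD_PYKE = False        # new pure-Python rules Engine
    cube_rules = iris.load_cube("example_rotated.nc")

    print(cube_pyke)
    print(cube_rules)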
diff --git a/lib/iris/tests/test_netcdf.py b/lib/iris/tests/test_netcdf.py index 4d92274fcf..919c6db6a5 100644 --- a/lib/iris/tests/test_netcdf.py +++ b/lib/iris/tests/test_netcdf.py @@ -41,9 +41,13 @@ @tests.skip_data class TestNetCDFLoad(tests.IrisTest): def setUp(self): + iris.fileformats.netcdf.DEBUG = True + iris.fileformats.netcdf.LOAD_PYKE = False self.tmpdir = None def tearDown(self): + iris.fileformats.netcdf.DEBUG = False + iris.fileformats.netcdf.LOAD_PYKE = True if self.tmpdir is not None: shutil.rmtree(self.tmpdir) @@ -127,11 +131,24 @@ def test_load_global_xyzt_gems_iter(self): def test_load_rotated_xy_land(self): # Test loading single xy rotated pole CF-netCDF file. + iris.fileformats.netcdf.LOAD_PYKE = True + print("Pyke version:") cube = iris.load_cube( tests.get_data_path( ("NetCDF", "rotated", "xy", "rotPole_landAreaFraction.nc") ) ) + print(cube) + iris.fileformats.netcdf.LOAD_PYKE = False + print("") + print("NON-Pyke version:") + cube = iris.load_cube( + tests.get_data_path( + ("NetCDF", "rotated", "xy", "rotPole_landAreaFraction.nc") + ) + ) + print(cube) + # Make sure the AuxCoords have lazy data. self.assertTrue(is_lazy_data(cube.coord("latitude").core_points())) self.assertCML(cube, ("netcdf", "netcdf_rotated_xy_land.cml")) From 1bb733892c856edcc530979b1821317115e5137c Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Fri, 21 May 2021 11:48:28 +0100 Subject: [PATCH 02/53] Rename some things + add comments. --- .../_nc_load_rules/{rules.py => actions.py} | 80 +++++++++++-------- lib/iris/fileformats/_nc_load_rules/engine.py | 9 ++- lib/iris/tests/test_netcdf.py | 2 + 3 files changed, 55 insertions(+), 36 deletions(-) rename lib/iris/fileformats/_nc_load_rules/{rules.py => actions.py} (78%) diff --git a/lib/iris/fileformats/_nc_load_rules/rules.py b/lib/iris/fileformats/_nc_load_rules/actions.py similarity index 78% rename from lib/iris/fileformats/_nc_load_rules/rules.py rename to lib/iris/fileformats/_nc_load_rules/actions.py index b7a83e9fa4..5ce683b733 100644 --- a/lib/iris/fileformats/_nc_load_rules/rules.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -16,12 +16,15 @@ engine.cf_var, .cube, .cube_parts, .requires, .rule_triggered, .filename Our "rules" are just action routines. -The master 'run_rules' routine decides which to call based on the info recorded -when processing each cube output. It does this in a simple explicit way, which -doesn't use any clever chaining, "trigger conditions" or rules-like behaviour. +The top-level 'run_actions' routine decides which actions to call, based on the +info recorded when processing each cube output. It does this in a simple +explicit way, which doesn't use any clever chaining, "trigger conditions" or +other rule-type logic. -FOR NOW: we are still using intermediate facts to carry information between -rules. +TODO: remove the use of intermediate "facts" to carry information between +actions. This mimics older behaviour, so is still useful while we are still +comparing behaviour with the old Pyke rules (debugging). But once that is no +longer useful, this can be considerably simplified. """ @@ -29,9 +32,9 @@ from functools import wraps -def convert_rulesfuncname_to_rulename(func_name): - # Given the name of a rules-func, return the name of the rule. - funcname_prefix = "rule_" +def convert_actionname_to_rulename(func_name): + # Given the name of an action-func, return the name of the rule. 
+ funcname_prefix = "action_" rulename_prefix = "fc_" # To match existing behaviours rule_name = func_name if rule_name.startswith(funcname_prefix): @@ -42,15 +45,13 @@ def convert_rulesfuncname_to_rulename(func_name): def _default_rulenamesfunc(func_name): - # A default function to deduce the rules-name from a rule-func-name. - # This (default) one assumes there are *no* additional call fact_arglist, - # i.e. the function does *not* take parameters to implement multiple rules. - rule_name = convert_rulesfuncname_to_rulename(func_name) + # A simple default function to deduce the rules-name from an action-name. + rule_name = convert_actionname_to_rulename(func_name) return rule_name -def rules_function(func): - # Wrap a rules function with some standard behaviour. +def action_function(func): + # Wrap an action function with some standard behaviour. # Notably : engages with the rules logging process. @wraps(func) def inner(engine, *args, **kwargs): @@ -58,9 +59,9 @@ def inner(engine, *args, **kwargs): rule_name = func(engine, *args, **kwargs) if rule_name is None: # Work out the corresponding rule name, and log it. - # Note: a rules returns a name string, which identifies it, + # Note: an action returns a name string, which identifies it, # but also may vary depending on whether it successfully - # triggered, and if so what it mathched. + # triggered, and if so what it matched. rule_name = _default_rulenamesfunc(func.__name__) engine.rule_triggered.add(rule_name) @@ -68,11 +69,16 @@ def inner(engine, *args, **kwargs): return inner -@rules_function -def rule_default(engine): +@action_function +def action_default(engine): hh.build_cube_metadata(engine) +# Lookup table used by 'action_provides_grid_mapping'. +# Maps each supported CF grid-mapping-name to a pair of handling ("helper") +# routines: +# (@0) a validity-checker (or None) +# (@1) a coord-system builder function. _grid_types_to_checker_builder = { hh.CF_GRID_MAPPING_LAT_LON: (None, hh.build_coordinate_system), hh.CF_GRID_MAPPING_ROTATED_LAT_LON: ( @@ -114,8 +120,8 @@ def rule_default(engine): } -@rules_function -def rule_provides_grid_mapping(engine, gridmapping_fact): +@action_function +def action_provides_grid_mapping(engine, gridmapping_fact): (var_name,) = gridmapping_fact rule_name = "fc_provides_grid_mapping" cf_var = engine.cf_var.cf_group[var_name] @@ -162,8 +168,8 @@ def rule_provides_grid_mapping(engine, gridmapping_fact): return rule_name -@rules_function -def rule_provides_coordinate(engine, dimcoord_fact): +@action_function +def action_provides_coordinate(engine, dimcoord_fact): (var_name,) = dimcoord_fact # Identify the coord type @@ -203,6 +209,14 @@ def rule_provides_coordinate(engine, dimcoord_fact): return rule_name +# Lookup table used by 'action_build_dimension_coordinate'. +# Maps each supported coordinate-type name (a rules-internal concept) to a pair +# of information values : +# (@0) the CF grid_mapping_name (or None) +# If set, the cube should have a coord-system, which is set on the +# resulting coordinate. If None, the coord has no coord_system. 
+# (@1) an (optional) fixed standard-name for the coordinate, or None +# If None, the coordinate name is copied from the source variable _coordtype_to_gridtype_coordname = { "latitude": ("latitude_longitude", hh.CF_VALUE_STD_NAME_LAT), "longitude": ("latitude_longitude", hh.CF_VALUE_STD_NAME_LON), @@ -222,8 +236,8 @@ def rule_provides_coordinate(engine, dimcoord_fact): } -@rules_function -def rule_build_coordinate(engine, providescoord_fact): +@action_function +def action_build_dimension_coordinate(engine, providescoord_fact): coord_type, var_name = providescoord_fact cf_var = engine.cf_var.cf_group[var_name] rule_name = f"fc_build_coordinate_{coord_type}" @@ -246,8 +260,8 @@ def rule_build_coordinate(engine, providescoord_fact): return rule_name -@rules_function -def rule_build_auxiliary_coordinate(engine, auxcoord_fact): +@action_function +def action_build_auxiliary_coordinate(engine, auxcoord_fact): (var_name,) = auxcoord_fact rule_name = "fc_build_auxiliary_coordinate" @@ -277,28 +291,28 @@ def rule_build_auxiliary_coordinate(engine, auxcoord_fact): return rule_name -def run_rules(engine): - # default (all cubes) rule, always runs - rule_default(engine) # This should run the default rules. +def run_actions(engine): + # default (all cubes) action, always runs + action_default(engine) # This should run the default rules. # deal with grid-mappings grid_mapping_facts = engine.fact_list("grid_mapping") for grid_mapping_fact in grid_mapping_facts: - rule_provides_grid_mapping(engine, grid_mapping_fact) + action_provides_grid_mapping(engine, grid_mapping_fact) # identify + record aka "PROVIDE" specific named coordinates # N.B. cf.py id-d these as coords NOT aux-coords (stored separately) # TODO: can probably remove this step ?? dimcoord_facts = engine.fact_list("coordinate") for dimcoord_fact in dimcoord_facts: - rule_provides_coordinate(engine, dimcoord_fact) + action_provides_coordinate(engine, dimcoord_fact) # build coordinates providescoord_facts = engine.fact_list("provides-coordinate-(oftype)") for providescoord_fact in providescoord_facts: - rule_build_coordinate(engine, providescoord_fact) + action_build_dimension_coordinate(engine, providescoord_fact) # build aux-coords auxcoord_facts = engine.fact_list("auxiliary_coordinate") for auxcoord_fact in auxcoord_facts: - rule_build_auxiliary_coordinate(engine, auxcoord_fact) + action_build_auxiliary_coordinate(engine, auxcoord_fact) diff --git a/lib/iris/fileformats/_nc_load_rules/engine.py b/lib/iris/fileformats/_nc_load_rules/engine.py index 780858df81..7531345b88 100644 --- a/lib/iris/fileformats/_nc_load_rules/engine.py +++ b/lib/iris/fileformats/_nc_load_rules/engine.py @@ -1,7 +1,10 @@ """ -A simple mimic of the Pyke 'knwoledge_engine', for interfacing to the routines +A simple mimic of the Pyke 'knowledge_engine', for interfacing to the routines in 'iris.fileformats.netcdf' with minimal changes to that code. +This allows us to replace the Pyke rules operation with the simpler pure-Python +translation operations in :mod:`iris.fileformats._nc_load_rules.actions`. + The core of this is the 'Engine' class, which mimics the Pyke engine operations, as used by our code to translate each data cube. @@ -10,7 +13,7 @@ used in :meth:`iris.fileformats.netcdf.pyke_stats`. """ -from .rules import run_rules +from .actions import run_actions class FactList: @@ -74,7 +77,7 @@ def activate(self, rules_base_str=None): in :mod:`iris.fileformats.netcdf._nc_load_rules.rules`. 
""" - run_rules(self) + run_actions(self) def print_stats(self): """No-op, called by :meth:`iris.fileformats.netcdf.pyke_stats`.""" diff --git a/lib/iris/tests/test_netcdf.py b/lib/iris/tests/test_netcdf.py index 919c6db6a5..8788290670 100644 --- a/lib/iris/tests/test_netcdf.py +++ b/lib/iris/tests/test_netcdf.py @@ -628,6 +628,8 @@ def test_no_name_cube(self): class TestNetCDFSave(tests.IrisTest): def setUp(self): + iris.fileformats.netcdf.DEBUG = True + iris.fileformats.netcdf.LOAD_PYKE = False self.cubell = iris.cube.Cube( np.arange(4).reshape(2, 2), "air_temperature" ) From 44bf952cfaff56f7bd8e02717d0e089945216b45 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Fri, 21 May 2021 15:12:39 +0100 Subject: [PATCH 03/53] Headers, tweaks, reorg test__load_cube. --- .../fileformats/_nc_load_rules/__init__.py | 17 +- .../fileformats/_nc_load_rules/actions.py | 5 + lib/iris/fileformats/_nc_load_rules/engine.py | 5 + .../fileformats/_nc_load_rules/helpers.py | 7 +- .../fileformats/netcdf/load_cube/__init__.py | 6 + .../netcdf/{ => load_cube}/test__load_cube.py | 0 .../load_cube/test__load_cube__activate.py | 161 ++++++++++++++++++ 7 files changed, 199 insertions(+), 2 deletions(-) create mode 100644 lib/iris/tests/unit/fileformats/netcdf/load_cube/__init__.py rename lib/iris/tests/unit/fileformats/netcdf/{ => load_cube}/test__load_cube.py (100%) create mode 100644 lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py diff --git a/lib/iris/fileformats/_nc_load_rules/__init__.py b/lib/iris/fileformats/_nc_load_rules/__init__.py index cfbff5bc7c..baea3cf555 100644 --- a/lib/iris/fileformats/_nc_load_rules/__init__.py +++ b/lib/iris/fileformats/_nc_load_rules/__init__.py @@ -1 +1,16 @@ -# Support for replacing Pyke rules. +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Support for cube-specific CF-to-Iris translation operations. + +Interprets CF concepts identified by :mod:`iris.fileformats.cf` to add +components into loaded cubes. + +For now : the API which mimics :class:`pyke.knowledge_engine.engine`. +As this is aiming to replace the old Pyke-based logic rules. +TODO: simplify once the parallel operation with Pyke is no longer required. + +""" diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index 5ce683b733..d2ebefc69e 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -1,3 +1,8 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Replacement code for the Pyke rules. diff --git a/lib/iris/fileformats/_nc_load_rules/engine.py b/lib/iris/fileformats/_nc_load_rules/engine.py index 7531345b88..60f956d4d1 100644 --- a/lib/iris/fileformats/_nc_load_rules/engine.py +++ b/lib/iris/fileformats/_nc_load_rules/engine.py @@ -1,3 +1,8 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ A simple mimic of the Pyke 'knowledge_engine', for interfacing to the routines in 'iris.fileformats.netcdf' with minimal changes to that code. 
diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py index 9a908a95a1..0ac1cb7472 100644 --- a/lib/iris/fileformats/_nc_load_rules/helpers.py +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -1,3 +1,8 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ All the pure-Python 'helper' functions which previously included in the Pyke rules database. @@ -246,7 +251,7 @@ def _get_ellipsoid(cf_grid_var): ################################################################################ -def build_coordinate_system(cf_grid_var): +def build_coordinate_system(engine, cf_grid_var): """Create a coordinate system from the CF-netCDF grid mapping variable.""" major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/__init__.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/__init__.py new file mode 100644 index 0000000000..8bc429a906 --- /dev/null +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/__init__.py @@ -0,0 +1,6 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Unit tests for the `iris.fileformats.netcdf._load_cube` function.""" diff --git a/lib/iris/tests/unit/fileformats/netcdf/test__load_cube.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube.py similarity index 100% rename from lib/iris/tests/unit/fileformats/netcdf/test__load_cube.py rename to lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube.py diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py new file mode 100644 index 0000000000..4afeeac429 --- /dev/null +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py @@ -0,0 +1,161 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the engine.activate() call within the +`iris.fileformats.netcdf._load_cube` function. + +For now, these tests are designed to function with **either** the "old" +Pyke-rules implementation in :mod:`iris.fileformats._pyke_rules`, **or** the +"new" :mod:`iris.fileformats._nc_load_rules`. +Both of those supply an "activate" call (for now : may be simplified in future). + +""" +import iris.tests as tests + +from pathlib import Path +import shutil +import subprocess +import tempfile + +from iris.fileformats.cf import CFReader +import iris.fileformats.netcdf +from iris.fileformats.netcdf import _load_cube +import iris.fileformats._nc_load_rules.engine + +""" +Testing method. +IN cf : "def _load_cube(engine, cf, cf_var, filename)" +WHERE: + - engine is a :class:`pyke.knowledge_engine.engine` + -- **OR** :class:`iris.fileformats._nc_load_rules.engine.Engine` + - cf is a CFReader + - cf_var is a CFDAtaVariable + +As it's hard to construct a suitable CFReader from scratch, it would seem +simpler (for now) to use an ACTUAL FILE. +Likewise, the easiest approach to that is with CDL and "ncgen". +To do this, we need a test "fixture" that can create suitable test files in a +temporary directory. 
+ +""" + + +class Test__grid_mappings(tests.IrisTest): + @classmethod + def setUpClass(cls): + # # Control which testing method we are applying. + # Create a temp directory for temp files. + cls.temp_dirpath = Path(tempfile.mkdtemp()) + + @classmethod + def tearDownClass(cls): + # Destroy a temp directory for temp files. + shutil.rmtree(cls.temp_dirpath) + + def _call_with_testfile(self): + # FOR NOW: absolutely basic example. + cdl_string = r""" + netcdf test { + dimensions: + lats = 2 ; + lons = 3 ; + variables: + double phenom(lats, lons) ; + phenom:standard_name = "air_temperature" ; + phenom:units = "K" ; + double lats(lats) ; + lats:axis = "Y" ; + lats:units = "degrees_north" ; + lats:standard_name = "latitude" ; + double lons(lons) ; + lons:axis = "X" ; + lons:units = "degrees_east" ; + lons:standard_name = "longitude" ; + } + """ + cdl_string = r""" + netcdf test { + dimensions: + lats = 2 ; + lons = 3 ; + variables: + double phenom(lats, lons) ; + phenom:standard_name = "air_temperature" ; + phenom:units = "K" ; + phenom:grid_mapping = "grid" ; + double lats(lats) ; + lats:axis = "Y" ; + lats:units = "degrees" ; + lats:standard_name = "latitude" ; + double lons(lons) ; + lons:axis = "X" ; + lons:units = "degrees_east" ; + lons:standard_name = "longitude" ; + int grid ; + grid:grid_mapping_name = "latitude_longitude"; + grid:earth_radius = 6.e6 ; + data: + lats = 10., 20. ; + lons = 100., 110., 120. ; + } + """ + cdl_path = str(self.temp_dirpath / "test.cdl") + nc_path = str(self.temp_dirpath / "test.nc") + with open(cdl_path, "w") as f_out: + f_out.write(cdl_string) + # Create reference netCDF file from reference CDL. + command = "ncgen -o {} {}".format(nc_path, cdl_path) + subprocess.check_call(command, shell=True) + + cf = CFReader(nc_path) + # Grab a data variable : FOR NOW, should be only 1 + # (cf_var,) = cf.cf_group.data_variables.values() + cf_var = cf.cf_group.data_variables["phenom"] + + use_pyke = True + if use_pyke: + engine = iris.fileformats.netcdf._pyke_kb_engine_real() + else: + engine = iris.fileformats._nc_load_rules.engine.Engine() + + iris.fileformats.netcdf.DEBUG = True + # iris.fileformats.netcdf.LOAD_PYKE = False + return _load_cube(engine, cf, cf_var, nc_path) + + def _check_result(self, cube): + self.assertEqual(cube.standard_name, "air_temperature") + self.assertEqual(cube.var_name, "phenom") + + def test_latlon(self): + options = {} + result = self._call_with_testfile(**options) + print(result) + print("coord-system = ", type(result.coord_system())) + print(" X cs = ", type(result.coord(axis="x").coord_system)) + print(" Y cs = ", type(result.coord(axis="y").coord_system)) + self._check_result(result, **options) + + +# keep for later ? +_cdl_string = r""" + netcdf test { + dimensions: + latitude = 2 ; + longitude = 3 ; + time = 2 ; + variables: + double phenom(time, latitude) ; + phenom:standard_name = "air_temperature" ; + phenom:units = "K" ; + float time(time) ; + time:units = "1" ; + time:standard_name = "time" ; + double latitude(latitude) ; + latitude:axis = "Y" ; + latitude:units = "1" ; + latitude:standard_name = "latitude" ; + } +""" From 5e3cb053f3990435ef3cc4f69963bfd6fe5bc597 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Fri, 21 May 2021 16:45:15 +0100 Subject: [PATCH 04/53] Fix handling of coord-systems. 
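
With this fix, a conforming latitude_longitude grid mapping should attach the
cube's coordinate system to both horizontal dim-coords, while non-conforming
lon/lat coords (given the 'miscellaneous' coord-type) are built without one.
A rough usage sketch of the intended behaviour (illustrative only;
"latlon.nc" is a hypothetical test file, not part of this change):

    import iris

    cube = iris.load_cube("latlon.nc")
    cs = cube.coord_system()
    for axis in ("x", "y"):
        # Conforming horizontal dim-coords share the cube's coord-system.
        assert cube.coord(axis=axis).coord_system == cs
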
--- .../fileformats/_nc_load_rules/actions.py | 24 ++++++++--------- .../load_cube/test__load_cube__activate.py | 26 ++----------------- 2 files changed, 13 insertions(+), 37 deletions(-) diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index d2ebefc69e..d23a2373e9 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -245,22 +245,20 @@ def action_provides_coordinate(engine, dimcoord_fact): def action_build_dimension_coordinate(engine, providescoord_fact): coord_type, var_name = providescoord_fact cf_var = engine.cf_var.cf_group[var_name] - rule_name = f"fc_build_coordinate_{coord_type}" + rule_name = f"fc_build_coordinate_({coord_type})" grid_type, coord_name = _coordtype_to_gridtype_coordname[coord_type] - succeed = True coord_system = None if grid_type is not None: - if coord_type not in ("latitude", "longitude"): - # There needs to be the right sort of coordinate system - coord_system = engine.cube_parts.get("coordinate_system") - if coord_system is None: - succeed = False - rule_name += " --FAILED(no coord-system)" - # TODO else: we ***asssume*** coord-system is the right type ?? - if succeed: - hh.build_dimension_coordinate( - engine, cf_var, coord_name=coord_name, coord_system=coord_system - ) + # If a type is identified with a grid, use the coordinate system + # N.B. this requires each grid-type identification to validate the + # coord var (e.g. "is_longitude"). + # Non-conforming lon/lat/projection coords will be classed as + # dim-coords by cf.py, but 'action_provides_coordinate' will give them + # a coord-type of 'miscellaneous' : hence, they have no coord-system. + coord_system = engine.cube_parts.get("coordinate_system") + hh.build_dimension_coordinate( + engine, cf_var, coord_name=coord_name, coord_system=coord_system + ) return rule_name diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py index 4afeeac429..46228b40ae 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py @@ -88,11 +88,11 @@ def _call_with_testfile(self): phenom:grid_mapping = "grid" ; double lats(lats) ; lats:axis = "Y" ; - lats:units = "degrees" ; + lats:units = "degrees_north" ; lats:standard_name = "latitude" ; double lons(lons) ; lons:axis = "X" ; - lons:units = "degrees_east" ; + lons:units = "degrees" ; // THIS IS A BUG! lons:standard_name = "longitude" ; int grid ; grid:grid_mapping_name = "latitude_longitude"; @@ -137,25 +137,3 @@ def test_latlon(self): print(" X cs = ", type(result.coord(axis="x").coord_system)) print(" Y cs = ", type(result.coord(axis="y").coord_system)) self._check_result(result, **options) - - -# keep for later ? -_cdl_string = r""" - netcdf test { - dimensions: - latitude = 2 ; - longitude = 3 ; - time = 2 ; - variables: - double phenom(time, latitude) ; - phenom:standard_name = "air_temperature" ; - phenom:units = "K" ; - float time(time) ; - time:units = "1" ; - time:standard_name = "time" ; - double latitude(latitude) ; - latitude:axis = "Y" ; - latitude:units = "1" ; - latitude:standard_name = "latitude" ; - } -""" From dfa6074e82e1d80d83288aa0104e91fc03487b51 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Fri, 21 May 2021 18:44:08 +0100 Subject: [PATCH 05/53] Remove extra cdl. 
--- .../load_cube/test__load_cube__activate.py | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py index 46228b40ae..de08b470c8 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py @@ -57,25 +57,6 @@ def tearDownClass(cls): def _call_with_testfile(self): # FOR NOW: absolutely basic example. - cdl_string = r""" - netcdf test { - dimensions: - lats = 2 ; - lons = 3 ; - variables: - double phenom(lats, lons) ; - phenom:standard_name = "air_temperature" ; - phenom:units = "K" ; - double lats(lats) ; - lats:axis = "Y" ; - lats:units = "degrees_north" ; - lats:standard_name = "latitude" ; - double lons(lons) ; - lons:axis = "X" ; - lons:units = "degrees_east" ; - lons:standard_name = "longitude" ; - } - """ cdl_string = r""" netcdf test { dimensions: From 1aa4676709fc1d752f6652f7c48e217b5bb43191 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 1 Jun 2021 18:43:45 +0100 Subject: [PATCH 06/53] Add more grid-mapping tests; tidy testcode structure a bit. --- .../fileformats/_nc_load_rules/actions.py | 26 ++- .../load_cube/test__load_cube__activate.py | 179 +++++++++++++++--- 2 files changed, 175 insertions(+), 30 deletions(-) diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index d23a2373e9..8dbc637f2a 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -14,10 +14,10 @@ "engine.assert_case_specific_fact(name, args)". 2) this is also used to store intermediate info passed between rules, which - used to done with a "facts_cf.provides" statement in rule actions. + used to be done with a "facts_cf.provides" statement in rule actions. -3) Iris-specific info is stored in our own additional properties stored in - extra properties added to the engine object : +3) Iris-specific info is (still) stored in additional properties created on + the engine object : engine.cf_var, .cube, .cube_parts, .requires, .rule_triggered, .filename Our "rules" are just action routines. @@ -26,6 +26,17 @@ explicit way, which doesn't use any clever chaining, "trigger conditions" or other rule-type logic. +Each 'action' function can replace several similar 'rules'. +E.G. 'action_provides_grid_mapping' replaces all 'fc_provides_grid+mapping_'. +To aid debug, each returns a 'rule_name' string, indicating which original rule +this particular action call is emulating : In some cases, this may include a +textual note that this rule 'failed', aka "did not trigger", which would not be +recorded in the original implementation. + +The top-level 'run_actions' ensures that the individual rules actions are +called, with various arguments, as appropriate to ensure the whole cube is +built as it was by the original rules implementation. + TODO: remove the use of intermediate "facts" to carry information between actions. This mimics older behaviour, so is still useful while we are still comparing behaviour with the old Pyke rules (debugging). But once that is no @@ -268,7 +279,9 @@ def action_build_auxiliary_coordinate(engine, auxcoord_fact): (var_name,) = auxcoord_fact rule_name = "fc_build_auxiliary_coordinate" - # FOR NOW: attempt to identify type, though it only affects rule-name? 
+ # FOR NOW: attempt to identify type + # TODO: can maybe eventually remove this, as it only affects rule_name. + # (but could possibly retain for future debugging purposes) coord_type = "" # unidentified : can be OK if hh.is_time(engine, var_name): coord_type = "time" @@ -300,11 +313,14 @@ def run_actions(engine): # deal with grid-mappings grid_mapping_facts = engine.fact_list("grid_mapping") + # For now, there should be at most *one* of these. + assert len(grid_mapping_facts) in (0, 1) for grid_mapping_fact in grid_mapping_facts: action_provides_grid_mapping(engine, grid_mapping_fact) # identify + record aka "PROVIDE" specific named coordinates - # N.B. cf.py id-d these as coords NOT aux-coords (stored separately) + # N.B. cf.py has identified that these are dim-coords, NOT aux-coords + # (which are recorded separately). # TODO: can probably remove this step ?? dimcoord_facts = engine.fact_list("coordinate") for dimcoord_fact in dimcoord_facts: diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py index de08b470c8..396d1d6a5e 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py @@ -26,7 +26,8 @@ import iris.fileformats._nc_load_rules.engine """ -Testing method. +Notes on testing method. + IN cf : "def _load_cube(engine, cf, cf_var, filename)" WHERE: - engine is a :class:`pyke.knowledge_engine.engine` @@ -43,7 +44,21 @@ """ -class Test__grid_mappings(tests.IrisTest): +class Mixin_Test__nc_load_actions: + """ + Class to make testcases for rules or actions code and check results. + + Defines standard setUp/tearDown-Class to create intermediate files in a + temporary directory. + + Testcase manufacture in _make_testcase_file', based on a simple latlon grid + example with various kwargs to control variations. + Testing in 'test_result', with various kwargs controlling expected results. + + Can also switch between testing Pyke and non-Pyke implementations (for now). + + """ + @classmethod def setUpClass(cls): # # Control which testing method we are applying. @@ -55,10 +70,33 @@ def tearDownClass(cls): # Destroy a temp directory for temp files. shutil.rmtree(cls.temp_dirpath) - def _call_with_testfile(self): - # FOR NOW: absolutely basic example. - cdl_string = r""" - netcdf test { + def make_testcase_cdl( + self, + cdl_path, + latitude_units=None, + gridmapvar_name=None, + gridmapvar_mappropertyname=None, + gridmapvar_missingradius=False, + ): + """ + Write a testcase example into a CDL file. + """ + if latitude_units is None: + latitude_units = "degrees_north" + grid_mapping_name = "grid" + g_varname = gridmapvar_name + g_mapname = gridmapvar_mappropertyname + if g_varname is None: + g_varname = grid_mapping_name + if g_mapname is None: + g_mapname = "grid_mapping_name" + if gridmapvar_missingradius: + g_radius_string = "" + else: + g_radius_string = f"{g_varname}:earth_radius = 6.e6 ;" + + cdl_string = f""" + netcdf test {{ dimensions: lats = 2 ; lons = 3 ; @@ -69,34 +107,47 @@ def _call_with_testfile(self): phenom:grid_mapping = "grid" ; double lats(lats) ; lats:axis = "Y" ; - lats:units = "degrees_north" ; + lats:units = "{latitude_units}" ; lats:standard_name = "latitude" ; double lons(lons) ; lons:axis = "X" ; - lons:units = "degrees" ; // THIS IS A BUG! 
+ lons:units = "degrees_east" ; lons:standard_name = "longitude" ; - int grid ; - grid:grid_mapping_name = "latitude_longitude"; - grid:earth_radius = 6.e6 ; + int {g_varname} ; + {g_varname}:{g_mapname} = "latitude_longitude"; + {g_radius_string} data: lats = 10., 20. ; lons = 100., 110., 120. ; - } + }} """ - cdl_path = str(self.temp_dirpath / "test.cdl") - nc_path = str(self.temp_dirpath / "test.nc") + # print('File content:') + # print(cdl_string) + # print('------\n') with open(cdl_path, "w") as f_out: f_out.write(cdl_string) + return cdl_path + + def create_cube_from_cdl(self, cdl_path, nc_path, use_pyke=False): + """ + Load the 'phenom' data variable in a CDL testcase, as a cube. + + Using ncgen and the selected _load_cube call. + + FOR NOW: can select whether load uses Pyke (rules) or newer actions + code. + TODO: remove when Pyke implementation is gone. + + """ # Create reference netCDF file from reference CDL. command = "ncgen -o {} {}".format(nc_path, cdl_path) subprocess.check_call(command, shell=True) cf = CFReader(nc_path) # Grab a data variable : FOR NOW, should be only 1 - # (cf_var,) = cf.cf_group.data_variables.values() + cf_var = list(cf.cf_group.data_variables.values())[0] cf_var = cf.cf_group.data_variables["phenom"] - use_pyke = True if use_pyke: engine = iris.fileformats.netcdf._pyke_kb_engine_real() else: @@ -106,15 +157,93 @@ def _call_with_testfile(self): # iris.fileformats.netcdf.LOAD_PYKE = False return _load_cube(engine, cf, cf_var, nc_path) - def _check_result(self, cube): + def _run_testcase(self, **testcase_kwargs): + """ + Run a testcase with chosen optionsm returning a test cube. + + The kwargs apply to the 'make_testcase_cdl' method. + + """ + cdl_path = str(self.temp_dirpath / "test.cdl") + nc_path = cdl_path.replace(".cdl", ".nc") + self.make_testcase_cdl(cdl_path, **testcase_kwargs) + cube = self.create_cube_from_cdl(cdl_path, nc_path) + return cube + + def _check_result(self, cube, cube_no_cs=False, latitude_no_cs=False): + """ + Check key properties of a result cube. + + Various options control the expected things which are tested. + """ self.assertEqual(cube.standard_name, "air_temperature") self.assertEqual(cube.var_name, "phenom") - def test_latlon(self): - options = {} - result = self._call_with_testfile(**options) - print(result) - print("coord-system = ", type(result.coord_system())) - print(" X cs = ", type(result.coord(axis="x").coord_system)) - print(" Y cs = ", type(result.coord(axis="y").coord_system)) - self._check_result(result, **options) + lon_coord = cube.coord("longitude") + lat_coord = cube.coord("latitude") + expected_dim_coords = [lon_coord, lat_coord] + expected_aux_coords = [] + # These are exactly the coords we have. + self.assertEqual( + set(expected_dim_coords), set(cube.coords(dim_coords=True)) + ) + # These are exactly the coords we have. 
+ self.assertEqual( + set(expected_aux_coords), set(cube.coords(dim_coords=False)) + ) + + cube_cs = cube.coord_system() + lat_cs = lat_coord.coord_system + lon_cs = lon_coord.coord_system + if cube_no_cs: + self.assertIsNone(cube_cs) + self.assertIsNone(lat_cs) + self.assertIsNone(lon_cs) + else: + self.assertEqual(lon_cs, cube_cs) + if latitude_no_cs: + self.assertIsNone(lat_cs) + else: + self.assertEqual(lat_cs, cube_cs) + + +class Test__grid_mapping(Mixin_Test__nc_load_actions, tests.IrisTest): + @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + + def test_basic_latlon(self): + # A basic reference example with a lat-long grid. + result = self._run_testcase() + self._check_result(result) + + def test_missing_latlon_radius(self): + # Lat-long with a missing earth-radius causes an error. + # One of very few cases where activation may encounter an error. + # N.B. doesn't really test rule-activation, but maybe worth doing. + with self.assertRaisesRegex(ValueError, "No ellipsoid"): + self._run_testcase(gridmapvar_missingradius=True) + + def test_bad_gridmapping_nameproperty(self): + # Fix the 'grid' var so it does not register as a grid-mapping. + result = self._run_testcase(gridmapvar_mappropertyname="mappy") + self._check_result(result, cube_no_cs=True) + + def test_latlon_bad_gridmapping_varname(self): + # rename the grid-mapping variable so it is effectively 'missing'. + with self.assertWarnsRegexp("Missing.*grid mapping variable 'grid'"): + result = self._run_testcase(gridmapvar_name="grid_2") + self._check_result(result, cube_no_cs=True) + + def test_latlon_bad_latlon_unit(self): + # Check with bad latitude units : 'degrees' in place of 'degrees_north'. + result = self._run_testcase(latitude_units="degrees") + self._check_result(result, latitude_no_cs=True) + + +if __name__ == "__main__": + tests.main() From 87d334725390bc97f8f7ea7eb5abc5740e59f29a Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 2 Jun 2021 00:25:24 +0100 Subject: [PATCH 07/53] Test more grid types : rotated and some non-latlon (WIP). --- .../load_cube/test__load_cube__activate.py | 177 ++++++++++++++---- 1 file changed, 136 insertions(+), 41 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py index 396d1d6a5e..115455191e 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py @@ -20,10 +20,12 @@ import subprocess import tempfile +import iris.coord_systems as ics from iris.fileformats.cf import CFReader import iris.fileformats.netcdf from iris.fileformats.netcdf import _load_cube import iris.fileformats._nc_load_rules.engine +import iris.fileformats._nc_load_rules.helpers as hh """ Notes on testing method. @@ -44,7 +46,7 @@ """ -class Mixin_Test__nc_load_actions: +class Mixin_Test__nc_load_actions(tests.IrisTest): """ Class to make testcases for rules or actions code and check results. @@ -77,12 +79,40 @@ def make_testcase_cdl( gridmapvar_name=None, gridmapvar_mappropertyname=None, gridmapvar_missingradius=False, + mapping_name=None, + use_bad_mapping_params=False, ): """ Write a testcase example into a CDL file. """ - if latitude_units is None: - latitude_units = "degrees_north" + # Grid-mapping options are standard-latlon, rotated, or non-latlon. 
+ # This affects names+units of the X and Y coords. + if mapping_name is None: + # Default grid-mapping and coords are standard lat-lon. + mapping_name = hh.CF_GRID_MAPPING_LAT_LON + xco_name = hh.CF_VALUE_STD_NAME_LON + yco_name = hh.CF_VALUE_STD_NAME_LAT + xco_units = "degrees_east" + # Special cases override some of the values. + if latitude_units is None: + yco_units = "degrees_north" + else: + # Override the latitude units (to invalidate). + yco_units = latitude_units + + elif mapping_name == hh.CF_GRID_MAPPING_ROTATED_LAT_LON: + xco_name = hh.CF_VALUE_STD_NAME_GRID_LON + yco_name = hh.CF_VALUE_STD_NAME_GRID_LAT + xco_units = "degrees" + yco_units = "degrees" + else: + # General non-latlon coordinates + # Exactly which depends on the grid_mapping name. + xco_name = hh.CF_VALUE_STD_NAME_PROJ_X + yco_name = hh.CF_VALUE_STD_NAME_PROJ_Y + xco_units = "m" + yco_units = "m" + grid_mapping_name = "grid" g_varname = gridmapvar_name g_mapname = gridmapvar_mappropertyname @@ -94,41 +124,58 @@ def make_testcase_cdl( g_radius_string = "" else: g_radius_string = f"{g_varname}:earth_radius = 6.e6 ;" + g_string = f""" + int {g_varname} ; + {g_varname}:{g_mapname} = "{mapping_name}"; + {g_radius_string} + """ + if use_bad_mapping_params: + if mapping_name == hh.CF_GRID_MAPPING_MERCATOR: + # Mercator mapping with nonzero false-easting is unsupported. + g_string += f""" + {g_varname}:{hh.CF_ATTR_GRID_FALSE_EASTING} = 1.0 ; + """ + elif False: + pass + else: + # Key is only valid for specific grid-mappings. + assert mapping_name in ( + hh.CF_GRID_MAPPING_MERCATOR, + hh.CF_GRID_MAPPING_STEREO, + ) cdl_string = f""" netcdf test {{ dimensions: - lats = 2 ; - lons = 3 ; + yco = 2 ; + xco = 3 ; variables: - double phenom(lats, lons) ; + double phenom(yco, xco) ; phenom:standard_name = "air_temperature" ; phenom:units = "K" ; phenom:grid_mapping = "grid" ; - double lats(lats) ; - lats:axis = "Y" ; - lats:units = "{latitude_units}" ; - lats:standard_name = "latitude" ; - double lons(lons) ; - lons:axis = "X" ; - lons:units = "degrees_east" ; - lons:standard_name = "longitude" ; - int {g_varname} ; - {g_varname}:{g_mapname} = "latitude_longitude"; - {g_radius_string} + double yco(yco) ; + yco:axis = "Y" ; + yco:units = "{yco_units}" ; + yco:standard_name = "{yco_name}" ; + double xco(xco) ; + xco:axis = "X" ; + xco:units = "{xco_units}" ; + xco:standard_name = "{xco_name}" ; + {g_string} data: - lats = 10., 20. ; - lons = 100., 110., 120. ; + yco = 10., 20. ; + xco = 100., 110., 120. ; }} """ - # print('File content:') - # print(cdl_string) - # print('------\n') + print("File content:") + print(cdl_string) + print("------\n") with open(cdl_path, "w") as f_out: f_out.write(cdl_string) return cdl_path - def create_cube_from_cdl(self, cdl_path, nc_path, use_pyke=False): + def create_cube_from_cdl(self, cdl_path, nc_path, use_pyke=True): """ Load the 'phenom' data variable in a CDL testcase, as a cube. @@ -157,7 +204,7 @@ def create_cube_from_cdl(self, cdl_path, nc_path, use_pyke=False): # iris.fileformats.netcdf.LOAD_PYKE = False return _load_cube(engine, cf, cf_var, nc_path) - def _run_testcase(self, **testcase_kwargs): + def run_testcase(self, **testcase_kwargs): """ Run a testcase with chosen optionsm returning a test cube. 
@@ -168,9 +215,19 @@ def _run_testcase(self, **testcase_kwargs): nc_path = cdl_path.replace(".cdl", ".nc") self.make_testcase_cdl(cdl_path, **testcase_kwargs) cube = self.create_cube_from_cdl(cdl_path, nc_path) + print("\nCube:") + print(cube) + print("") return cube - def _check_result(self, cube, cube_no_cs=False, latitude_no_cs=False): + def check_result( + self, + cube, + cube_cstype=None, + cube_no_cs=False, + cube_no_xycoords=False, + latitude_no_cs=False, + ): """ Check key properties of a result cube. @@ -179,27 +236,43 @@ def _check_result(self, cube, cube_no_cs=False, latitude_no_cs=False): self.assertEqual(cube.standard_name, "air_temperature") self.assertEqual(cube.var_name, "phenom") - lon_coord = cube.coord("longitude") - lat_coord = cube.coord("latitude") - expected_dim_coords = [lon_coord, lat_coord] - expected_aux_coords = [] - # These are exactly the coords we have. + x_coords = cube.coords(axis="x") + y_coords = cube.coords(axis="y") + expected_dim_coords = x_coords + y_coords self.assertEqual( set(expected_dim_coords), set(cube.coords(dim_coords=True)) ) # These are exactly the coords we have. + if cube_no_xycoords: + self.assertEqual(expected_dim_coords, []) + x_coord = None + y_coord = None + else: + self.assertEqual(len(x_coords), 1) + (x_coord,) = x_coords + self.assertEqual(len(y_coords), 1) + (y_coord,) = y_coords + + expected_aux_coords = [] + # These are exactly the coords we have. self.assertEqual( set(expected_aux_coords), set(cube.coords(dim_coords=False)) ) cube_cs = cube.coord_system() - lat_cs = lat_coord.coord_system - lon_cs = lon_coord.coord_system + if cube_no_xycoords: + lat_cs = None + lon_cs = None + else: + lat_cs = y_coord.coord_system + lon_cs = x_coord.coord_system if cube_no_cs: self.assertIsNone(cube_cs) self.assertIsNone(lat_cs) self.assertIsNone(lon_cs) else: + if cube_cstype is not None: + self.assertIsInstance(cube_cs, cube_cstype) self.assertEqual(lon_cs, cube_cs) if latitude_no_cs: self.assertIsNone(lat_cs) @@ -218,31 +291,53 @@ def tearDownClass(cls): def test_basic_latlon(self): # A basic reference example with a lat-long grid. - result = self._run_testcase() - self._check_result(result) + result = self.run_testcase() + self.check_result(result) def test_missing_latlon_radius(self): # Lat-long with a missing earth-radius causes an error. # One of very few cases where activation may encounter an error. # N.B. doesn't really test rule-activation, but maybe worth doing. with self.assertRaisesRegex(ValueError, "No ellipsoid"): - self._run_testcase(gridmapvar_missingradius=True) + self.run_testcase(gridmapvar_missingradius=True) def test_bad_gridmapping_nameproperty(self): # Fix the 'grid' var so it does not register as a grid-mapping. - result = self._run_testcase(gridmapvar_mappropertyname="mappy") - self._check_result(result, cube_no_cs=True) + result = self.run_testcase(gridmapvar_mappropertyname="mappy") + self.check_result(result, cube_no_cs=True) def test_latlon_bad_gridmapping_varname(self): # rename the grid-mapping variable so it is effectively 'missing'. with self.assertWarnsRegexp("Missing.*grid mapping variable 'grid'"): - result = self._run_testcase(gridmapvar_name="grid_2") - self._check_result(result, cube_no_cs=True) + result = self.run_testcase(gridmapvar_name="grid_2") + self.check_result(result, cube_no_cs=True) def test_latlon_bad_latlon_unit(self): # Check with bad latitude units : 'degrees' in place of 'degrees_north'. 
- result = self._run_testcase(latitude_units="degrees") - self._check_result(result, latitude_no_cs=True) + result = self.run_testcase(latitude_units="degrees") + self.check_result(result, latitude_no_cs=True) + + def test_mapping_rotated(self): + result = self.run_testcase( + mapping_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON + ) + self.check_result(result, cube_cstype=ics.RotatedGeogCS) + + def test_mapping_albers(self): + result = self.run_testcase(mapping_name=hh.CF_GRID_MAPPING_ALBERS) + self.check_result(result, cube_cstype=ics.AlbersEqualArea) + + def test_mapping_mercator(self): + result = self.run_testcase(mapping_name=hh.CF_GRID_MAPPING_MERCATOR) + self.check_result(result, cube_cstype=ics.Mercator) + + def test_mapping_mercator__fail_unsupported(self): + with self.assertWarnsRegexp("not yet supported for Mercator"): + result = self.run_testcase( + mapping_name=hh.CF_GRID_MAPPING_MERCATOR, + use_bad_mapping_params=True, + ) + self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) if __name__ == "__main__": From 88a1b2d5cad21acd169cc722088686aec11c66b5 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 2 Jun 2021 10:21:51 +0100 Subject: [PATCH 08/53] Fix 'checker' call usage. --- lib/iris/fileformats/_nc_load_rules/actions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index 8dbc637f2a..38b2b96d51 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -162,7 +162,7 @@ def action_provides_grid_mapping(engine, gridmapping_fact): # succeed = False # rule_name += f' --(FAILED is_grid_mapping)' if succeed: - if checker is not None and not checker(engine, grid_mapping_type): + if checker is not None and not checker(engine, var_name): succeed = False rule_name += f" --(FAILED check {checker.__name__})" From ad3ec2e3006d6517dfc72497e90684e702128410 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 2 Jun 2021 10:41:49 +0100 Subject: [PATCH 09/53] Reorganise testcode inheritance to provide testclasses for pyke/nonpyke. Replace 'bad mapping' control with scale-factor key. **NOTE** non-pyke bad mercatorhas outstanding test failures -- to be fixed. --- .../load_cube/test__load_cube__activate.py | 129 ++++++++++++------ 1 file changed, 86 insertions(+), 43 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py index 115455191e..3942f4bcb6 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py @@ -46,21 +46,45 @@ """ -class Mixin_Test__nc_load_actions(tests.IrisTest): +class Mixin_Test__nc_load_actions: """ - Class to make testcases for rules or actions code and check results. + Class to make testcases for rules or actions code, and check results. - Defines standard setUp/tearDown-Class to create intermediate files in a - temporary directory. + Defines standard setUpClass/tearDownClass methods, to create a temporary + directory for intermediate files. + NOTE: owing to peculiarities of unittest, these must be explicitly called + from a setUpClass/tearDownClass within the 'final' inheritor, i.e. the + actual Test_XXX class which also inherits unittest.TestCase. 
- Testcase manufacture in _make_testcase_file', based on a simple latlon grid - example with various kwargs to control variations. - Testing in 'test_result', with various kwargs controlling expected results. + Testcases are manufactured by the '_make_testcase_cdl' method. + These are based on a 'standard simple latlon grid' example. + Various kwargs control variations on this. - Can also switch between testing Pyke and non-Pyke implementations (for now). + The 'run_testcase' method takes the '_make_testcase_cdl' kwargs and makes + a result cube (by: producing cdl, converting to netcdf, and loading). + + The 'check_result' method performs various checks on the result, with + kwargs controlling the expected properties to be tested against. + This usage is *also* based on the 'standard simple latlon grid' example, + the kwargs specify expected differences from that. + + Can also test with either the Pyke(rules) or non-Pyke (actions) + implementations (for now). """ + # + # "global" test settings + # + + # whether to test 'rules' or 'actions' implementations + # TODO: remove when Pyke is gone + use_pyke = True + + # whether to output various debug info + # TODO: ?possibly? remove when development is complete + debug = False + @classmethod def setUpClass(cls): # # Control which testing method we are applying. @@ -72,7 +96,7 @@ def tearDownClass(cls): # Destroy a temp directory for temp files. shutil.rmtree(cls.temp_dirpath) - def make_testcase_cdl( + def _make_testcase_cdl( self, cdl_path, latitude_units=None, @@ -80,7 +104,7 @@ def make_testcase_cdl( gridmapvar_mappropertyname=None, gridmapvar_missingradius=False, mapping_name=None, - use_bad_mapping_params=False, + mapping_scalefactor=None, ): """ Write a testcase example into a CDL file. @@ -129,20 +153,13 @@ def make_testcase_cdl( {g_varname}:{g_mapname} = "{mapping_name}"; {g_radius_string} """ - if use_bad_mapping_params: - if mapping_name == hh.CF_GRID_MAPPING_MERCATOR: - # Mercator mapping with nonzero false-easting is unsupported. - g_string += f""" - {g_varname}:{hh.CF_ATTR_GRID_FALSE_EASTING} = 1.0 ; - """ - elif False: - pass - else: - # Key is only valid for specific grid-mappings. - assert mapping_name in ( - hh.CF_GRID_MAPPING_MERCATOR, - hh.CF_GRID_MAPPING_STEREO, - ) + if mapping_scalefactor is not None: + # Add a specific scale-factor term to the grid mappinf. + # Non-unity scale not supported for Mercator/Stereographic. + sfapo_name = hh.CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN + g_string += f""" + {g_varname}:{sfapo_name} = {mapping_scalefactor} ; + """ cdl_string = f""" netcdf test {{ @@ -168,23 +185,20 @@ def make_testcase_cdl( xco = 100., 110., 120. ; }} """ - print("File content:") - print(cdl_string) - print("------\n") + if self.debug: + print("File content:") + print(cdl_string) + print("------\n") with open(cdl_path, "w") as f_out: f_out.write(cdl_string) return cdl_path - def create_cube_from_cdl(self, cdl_path, nc_path, use_pyke=True): + def _load_cube_from_cdl(self, cdl_path, nc_path): """ Load the 'phenom' data variable in a CDL testcase, as a cube. Using ncgen and the selected _load_cube call. - FOR NOW: can select whether load uses Pyke (rules) or newer actions - code. - TODO: remove when Pyke implementation is gone. - """ # Create reference netCDF file from reference CDL. 
command = "ncgen -o {} {}".format(nc_path, cdl_path) @@ -195,12 +209,12 @@ def create_cube_from_cdl(self, cdl_path, nc_path, use_pyke=True): cf_var = list(cf.cf_group.data_variables.values())[0] cf_var = cf.cf_group.data_variables["phenom"] - if use_pyke: + if self.use_pyke: engine = iris.fileformats.netcdf._pyke_kb_engine_real() else: engine = iris.fileformats._nc_load_rules.engine.Engine() - iris.fileformats.netcdf.DEBUG = True + iris.fileformats.netcdf.DEBUG = self.debug # iris.fileformats.netcdf.LOAD_PYKE = False return _load_cube(engine, cf, cf_var, nc_path) @@ -208,16 +222,17 @@ def run_testcase(self, **testcase_kwargs): """ Run a testcase with chosen optionsm returning a test cube. - The kwargs apply to the 'make_testcase_cdl' method. + The kwargs apply to the '_make_testcase_cdl' method. """ cdl_path = str(self.temp_dirpath / "test.cdl") nc_path = cdl_path.replace(".cdl", ".nc") - self.make_testcase_cdl(cdl_path, **testcase_kwargs) - cube = self.create_cube_from_cdl(cdl_path, nc_path) - print("\nCube:") - print(cube) - print("") + self._make_testcase_cdl(cdl_path, **testcase_kwargs) + cube = self._load_cube_from_cdl(cdl_path, nc_path) + if self.debug: + print("\nCube:") + print(cube) + print("") return cube def check_result( @@ -280,7 +295,8 @@ def check_result( self.assertEqual(lat_cs, cube_cs) -class Test__grid_mapping(Mixin_Test__nc_load_actions, tests.IrisTest): +class Mixin__grid_mapping(Mixin_Test__nc_load_actions): + # Various tests for translation of grid=mappings @classmethod def setUpClass(cls): super().setUpClass() @@ -297,7 +313,7 @@ def test_basic_latlon(self): def test_missing_latlon_radius(self): # Lat-long with a missing earth-radius causes an error. # One of very few cases where activation may encounter an error. - # N.B. doesn't really test rule-activation, but maybe worth doing. + # N.B. doesn't really test rules-activation, but maybe worth doing. with self.assertRaisesRegex(ValueError, "No ellipsoid"): self.run_testcase(gridmapvar_missingradius=True) @@ -333,12 +349,39 @@ def test_mapping_mercator(self): def test_mapping_mercator__fail_unsupported(self): with self.assertWarnsRegexp("not yet supported for Mercator"): + # Set a non-unity scale factor, which mercator cannot handle. result = self.run_testcase( mapping_name=hh.CF_GRID_MAPPING_MERCATOR, - use_bad_mapping_params=True, + mapping_scalefactor=2.0, ) self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) +class Test__grid_mapping__pyke_rules(Mixin__grid_mapping, tests.IrisTest): + # Various tests for translation of grid=mappings + use_pyke = True + + @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + + +class Test__grid_mapping__nonpyke_actions(Mixin__grid_mapping, tests.IrisTest): + # Various tests for translation of grid=mappings + use_pyke = False + + @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + + if __name__ == "__main__": tests.main() From b58c4bc553ce398a282ba70856fcdb9c1dd89f07 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 2 Jun 2021 12:32:42 +0100 Subject: [PATCH 10/53] Tests for all supported grid-mappings. Note which rules trigger in each case. 
--- .../load_cube/test__load_cube__activate.py | 201 +++++++++++++++++- 1 file changed, 191 insertions(+), 10 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py index 3942f4bcb6..44c1e55dc8 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py @@ -125,10 +125,12 @@ def _make_testcase_cdl( yco_units = latitude_units elif mapping_name == hh.CF_GRID_MAPPING_ROTATED_LAT_LON: + # Rotated lat-lon coordinates. xco_name = hh.CF_VALUE_STD_NAME_GRID_LON yco_name = hh.CF_VALUE_STD_NAME_GRID_LAT xco_units = "degrees" yco_units = "degrees" + else: # General non-latlon coordinates # Exactly which depends on the grid_mapping name. @@ -153,6 +155,8 @@ def _make_testcase_cdl( {g_varname}:{g_mapname} = "{mapping_name}"; {g_radius_string} """ + + # Add a specified scale-factor if keyword is set if mapping_scalefactor is not None: # Add a specific scale-factor term to the grid mappinf. # Non-unity scale not supported for Mercator/Stereographic. @@ -161,6 +165,54 @@ def _make_testcase_cdl( {g_varname}:{sfapo_name} = {mapping_scalefactor} ; """ + # + # Add various minimal required properties for different grid mappings + # + + # Those which require 'latitude of projection origin' + if mapping_name in ( + hh.CF_GRID_MAPPING_TRANSVERSE, + hh.CF_GRID_MAPPING_STEREO, + hh.CF_GRID_MAPPING_GEOSTATIONARY, + hh.CF_GRID_MAPPING_VERTICAL, + ): + latpo_name = hh.CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN + g_string += f""" + {g_varname}:{latpo_name} = 0.0 ; + """ + # Those which require 'longitude of projection origin' + if mapping_name in ( + hh.CF_GRID_MAPPING_STEREO, + hh.CF_GRID_MAPPING_GEOSTATIONARY, + hh.CF_GRID_MAPPING_VERTICAL, + ): + lonpo_name = hh.CF_ATTR_GRID_LON_OF_PROJ_ORIGIN + g_string += f""" + {g_varname}:{lonpo_name} = 0.0 ; + """ + # Those which require 'longitude of central meridian' + if mapping_name in (hh.CF_GRID_MAPPING_TRANSVERSE,): + latcm_name = hh.CF_ATTR_GRID_LON_OF_CENT_MERIDIAN + g_string += f""" + {g_varname}:{latcm_name} = 0.0 ; + """ + # Those which require 'perspective point height' + if mapping_name in ( + hh.CF_GRID_MAPPING_VERTICAL, + hh.CF_GRID_MAPPING_GEOSTATIONARY, + ): + pph_name = hh.CF_ATTR_GRID_PERSPECTIVE_HEIGHT + g_string += f""" + {g_varname}:{pph_name} = 600000.0 ; + """ + # Those which require 'sweep angle axis' + if mapping_name in (hh.CF_GRID_MAPPING_GEOSTATIONARY,): + saa_name = hh.CF_ATTR_GRID_SWEEP_ANGLE_AXIS + g_string += f""" + {g_varname}:{saa_name} = "y" ; + """ + + # Construct the total CDL string cdl_string = f""" netcdf test {{ dimensions: @@ -296,17 +348,21 @@ def check_result( class Mixin__grid_mapping(Mixin_Test__nc_load_actions): - # Various tests for translation of grid=mappings - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() + # Various testcases for translation of grid-mappings def test_basic_latlon(self): # A basic reference example with a lat-long grid. 
+ # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_latitude_longitude + # 003 : fc_provides_coordinate_latitude + # 004 : fc_provides_coordinate_longitude + # 005 : fc_build_coordinate_latitude + # 006 : fc_build_coordinate_longitude + # Notes: + # grid-mapping: regular latlon + # dim-coords: lat+lon + # coords-build: standard latlon coords (with latlon coord-system) result = self.run_testcase() self.check_result(result) @@ -314,40 +370,137 @@ def test_missing_latlon_radius(self): # Lat-long with a missing earth-radius causes an error. # One of very few cases where activation may encounter an error. # N.B. doesn't really test rules-activation, but maybe worth doing. + # (no rules trigger) with self.assertRaisesRegex(ValueError, "No ellipsoid"): self.run_testcase(gridmapvar_missingradius=True) def test_bad_gridmapping_nameproperty(self): # Fix the 'grid' var so it does not register as a grid-mapping. + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_coordinate_latitude + # 003 : fc_provides_coordinate_longitude + # 004 : fc_build_coordinate_latitude_nocs + # 005 : fc_build_coordinate_longitude_nocs + # Notes: + # grid-mapping: NONE + # dim-coords: lat+lon + # coords-build: latlon coords NO coord-system result = self.run_testcase(gridmapvar_mappropertyname="mappy") self.check_result(result, cube_no_cs=True) def test_latlon_bad_gridmapping_varname(self): # rename the grid-mapping variable so it is effectively 'missing'. + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_coordinate_latitude + # 003 : fc_provides_coordinate_longitude + # 004 : fc_build_coordinate_latitude_nocs + # 005 : fc_build_coordinate_longitude_nocs + # Notes: + # no coord-system + # all the same as test_bad_gridmapping_nameproperty with self.assertWarnsRegexp("Missing.*grid mapping variable 'grid'"): result = self.run_testcase(gridmapvar_name="grid_2") self.check_result(result, cube_no_cs=True) def test_latlon_bad_latlon_unit(self): # Check with bad latitude units : 'degrees' in place of 'degrees_north'. + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_latitude_longitude + # 003 : fc_provides_coordinate_longitude + # 004 : fc_build_coordinate_longitude + # 005 : fc_default_coordinate + # Notes: + # grid-mapping: regular latlon + # dim-coords: + # x is regular longitude dim-coord + # y is 'default' coord ==> builds as an 'extra' dim-coord + # coords-build: + # x(lon) is regular latlon with coord-system + # y(lat) is a dim-coord, but NO coord-system result = self.run_testcase(latitude_units="degrees") self.check_result(result, latitude_no_cs=True) def test_mapping_rotated(self): + # Test with rotated-latlon grid-mapping + # Distinct from both regular-latlon and non-latlon cases, as the + # coordinate standard names and units are different. + # (run_testcase/_make_testcase_cdl know how to handle that). + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_rotated_latitude_longitude + # 003 : fc_provides_coordinate_latitude + # 004 : fc_provides_coordinate_longitude + # 005 : fc_build_coordinate_latitude_rotated + # 006 : fc_build_coordinate_longitude_rotated + # Notes: + # grid-mapping: rotated lat-lon + # dim-coords: lat+lon + # coords-build: lat+lon coords ROTATED, with coord-system + # (rotated means different name + units) result = self.run_testcase( mapping_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON ) self.check_result(result, cube_cstype=ics.RotatedGeogCS) + # + # All non-latlon coordinate systems ... 
+ # These all have projection-x/y coordinates with units of metres + # They all work the same way, except that Mercator/Stereographic have + # parameter checking routines that can fail. + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_ + # 003 : fc_provides_projection_x_coordinate + # 004 : fc_provides_projection_y_coordinate + # 005 : fc_build_coordinate_projection_x_ + # 006 : fc_build_coordinate_projection_y_ + # Notes: + # grid-mapping: + # dim-coords: proj-x and -y + # coords-build: proj-x/-y_, with coord-system + def test_mapping_albers(self): result = self.run_testcase(mapping_name=hh.CF_GRID_MAPPING_ALBERS) self.check_result(result, cube_cstype=ics.AlbersEqualArea) + def test_mapping_geostationary(self): + result = self.run_testcase( + mapping_name=hh.CF_GRID_MAPPING_GEOSTATIONARY + ) + self.check_result(result, cube_cstype=ics.Geostationary) + + def test_mapping_lambert_azimuthal(self): + result = self.run_testcase( + mapping_name=hh.CF_GRID_MAPPING_LAMBERT_AZIMUTHAL + ) + self.check_result(result, cube_cstype=ics.LambertAzimuthalEqualArea) + + def test_mapping_lambert_conformal(self): + result = self.run_testcase( + mapping_name=hh.CF_GRID_MAPPING_LAMBERT_CONFORMAL + ) + self.check_result(result, cube_cstype=ics.LambertConformal) + def test_mapping_mercator(self): result = self.run_testcase(mapping_name=hh.CF_GRID_MAPPING_MERCATOR) self.check_result(result, cube_cstype=ics.Mercator) def test_mapping_mercator__fail_unsupported(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_projection_x_coordinate + # 003 : fc_provides_projection_y_coordinate + # Notes: + # grid-mapping: NONE + # dim-coords: proj-x and -y + # coords-build: NONE + # = NO coord-system + # = NO dim-coords built (cube has no coords) with self.assertWarnsRegexp("not yet supported for Mercator"): # Set a non-unity scale factor, which mercator cannot handle. result = self.run_testcase( @@ -356,9 +509,37 @@ def test_mapping_mercator__fail_unsupported(self): ) self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) + def test_mapping_stereographic(self): + result = self.run_testcase(mapping_name=hh.CF_GRID_MAPPING_STEREO) + self.check_result(result, cube_cstype=ics.Stereographic) + + def test_mapping_stereographic__fail_unsupported(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_projection_x_coordinate + # 003 : fc_provides_projection_y_coordinate + # Notes: + # as for 'mercator__fail_unsupported', above + # = NO dim-coords built (cube has no coords) + with self.assertWarnsRegexp("not yet supported for stereographic"): + # Set a non-unity scale factor, which stereo cannot handle. 
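            # (A non-unity scale-factor is not handled by the stereographic
            # translation : no coord-system gets built, and a warning is
            # issued instead.)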
+ result = self.run_testcase( + mapping_name=hh.CF_GRID_MAPPING_STEREO, + mapping_scalefactor=2.0, + ) + self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) + + def test_mapping_transverse_mercator(self): + result = self.run_testcase(mapping_name=hh.CF_GRID_MAPPING_TRANSVERSE) + self.check_result(result, cube_cstype=ics.TransverseMercator) + + def test_mapping_vertical_perspective(self): + result = self.run_testcase(mapping_name=hh.CF_GRID_MAPPING_VERTICAL) + self.check_result(result, cube_cstype=ics.VerticalPerspective) + class Test__grid_mapping__pyke_rules(Mixin__grid_mapping, tests.IrisTest): - # Various tests for translation of grid=mappings + # Run grid-mapping tests with Pyke (rules) use_pyke = True @classmethod @@ -371,7 +552,7 @@ def tearDownClass(cls): class Test__grid_mapping__nonpyke_actions(Mixin__grid_mapping, tests.IrisTest): - # Various tests for translation of grid=mappings + # Run grid-mapping tests with non-Pyke (actions) use_pyke = False @classmethod From 04b079a20dd159a89e91427294ff839bb47ed95c Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 2 Jun 2021 14:23:44 +0100 Subject: [PATCH 11/53] Disable testing against non-pyke code, for now. --- .../fileformats/netcdf/load_cube/test__load_cube__activate.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py index 44c1e55dc8..62fe79a47f 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py @@ -551,6 +551,10 @@ def tearDownClass(cls): super().tearDownClass() +from unittest import skip + + +@skip class Test__grid_mapping__nonpyke_actions(Mixin__grid_mapping, tests.IrisTest): # Run grid-mapping tests with non-Pyke (actions) use_pyke = False From 1e0833e2d3440748cedac8ef0b5aaf95f04202f5 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 2 Jun 2021 14:42:25 +0100 Subject: [PATCH 12/53] Revert old tests/test_netcdf.py --- lib/iris/tests/test_netcdf.py | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/lib/iris/tests/test_netcdf.py b/lib/iris/tests/test_netcdf.py index 8788290670..4d92274fcf 100644 --- a/lib/iris/tests/test_netcdf.py +++ b/lib/iris/tests/test_netcdf.py @@ -41,13 +41,9 @@ @tests.skip_data class TestNetCDFLoad(tests.IrisTest): def setUp(self): - iris.fileformats.netcdf.DEBUG = True - iris.fileformats.netcdf.LOAD_PYKE = False self.tmpdir = None def tearDown(self): - iris.fileformats.netcdf.DEBUG = False - iris.fileformats.netcdf.LOAD_PYKE = True if self.tmpdir is not None: shutil.rmtree(self.tmpdir) @@ -131,24 +127,11 @@ def test_load_global_xyzt_gems_iter(self): def test_load_rotated_xy_land(self): # Test loading single xy rotated pole CF-netCDF file. - iris.fileformats.netcdf.LOAD_PYKE = True - print("Pyke version:") cube = iris.load_cube( tests.get_data_path( ("NetCDF", "rotated", "xy", "rotPole_landAreaFraction.nc") ) ) - print(cube) - iris.fileformats.netcdf.LOAD_PYKE = False - print("") - print("NON-Pyke version:") - cube = iris.load_cube( - tests.get_data_path( - ("NetCDF", "rotated", "xy", "rotPole_landAreaFraction.nc") - ) - ) - print(cube) - # Make sure the AuxCoords have lazy data. 
self.assertTrue(is_lazy_data(cube.coord("latitude").core_points())) self.assertCML(cube, ("netcdf", "netcdf_rotated_xy_land.cml")) @@ -628,8 +611,6 @@ def test_no_name_cube(self): class TestNetCDFSave(tests.IrisTest): def setUp(self): - iris.fileformats.netcdf.DEBUG = True - iris.fileformats.netcdf.LOAD_PYKE = False self.cubell = iris.cube.Cube( np.arange(4).reshape(2, 2), "air_temperature" ) From b119c7b675c44ef03c16ec70d7c848dbcbd31502 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 2 Jun 2021 15:17:41 +0100 Subject: [PATCH 13/53] Odd clarifying comments. --- .../load_cube/test__load_cube__activate.py | 41 +++++++++++++++---- 1 file changed, 33 insertions(+), 8 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py index 62fe79a47f..cd62c9b481 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py @@ -108,16 +108,29 @@ def _make_testcase_cdl( ): """ Write a testcase example into a CDL file. + + This is the "master" routine for creating all our testcases. + Kwarg options modify a simple default testcase with a latlon grid. + The routine handles the various testcase options and their possible + interactions. This includes knowing what extra changes are required + to support different grid-mapping types (for example). + """ - # Grid-mapping options are standard-latlon, rotated, or non-latlon. + # The grid-mapping options are standard-latlon, rotated, or non-latlon. # This affects names+units of the X and Y coords. + # We don't have an option to *not* include a grid-mapping variable, but + # we can mimic a missing grid-mapping by changing the varname from that + # which the data-variable refers to, with "gridmapvar_name=xxx". + # Likewise, an invalid (unrecognised) grid-mapping can be mimicked by + # selecting an unkown 'grid_mapping_name' property, with + # "gridmapvar_mappropertyname=xxx". if mapping_name is None: # Default grid-mapping and coords are standard lat-lon. mapping_name = hh.CF_GRID_MAPPING_LAT_LON xco_name = hh.CF_VALUE_STD_NAME_LON yco_name = hh.CF_VALUE_STD_NAME_LAT xco_units = "degrees_east" - # Special cases override some of the values. + # Special kwarg overrides some of the values. if latitude_units is None: yco_units = "degrees_north" else: @@ -140,12 +153,16 @@ def _make_testcase_cdl( yco_units = "m" grid_mapping_name = "grid" + # Options can override the gridvar name, and its 'grid+mapping_name' + # property. g_varname = gridmapvar_name g_mapname = gridmapvar_mappropertyname if g_varname is None: g_varname = grid_mapping_name if g_mapname is None: g_mapname = "grid_mapping_name" + + # Omit the earth radius, if requested. if gridmapvar_missingradius: g_radius_string = "" else: @@ -156,17 +173,18 @@ def _make_testcase_cdl( {g_radius_string} """ - # Add a specified scale-factor if keyword is set + # Add a specified scale-factor, if requested. if mapping_scalefactor is not None: - # Add a specific scale-factor term to the grid mappinf. - # Non-unity scale not supported for Mercator/Stereographic. + # Add a specific scale-factor term to the grid mapping. + # (Non-unity scale is not supported for Mercator/Stereographic). 
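            # For illustration : with the default grid varname "grid" and,
            # say, mapping_scalefactor=2.0, this appends an attribute roughly
            # like
            #     grid:scale_factor_at_projection_origin = 2.0 ;
            # (the exact attribute name comes from
            # hh.CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN).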
sfapo_name = hh.CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN g_string += f""" {g_varname}:{sfapo_name} = {mapping_scalefactor} ; """ # - # Add various minimal required properties for different grid mappings + # Add various additional (minimal) required properties for different + # grid mapping types. # # Those which require 'latitude of projection origin' @@ -390,7 +408,8 @@ def test_bad_gridmapping_nameproperty(self): self.check_result(result, cube_no_cs=True) def test_latlon_bad_gridmapping_varname(self): - # rename the grid-mapping variable so it is effectively 'missing'. + # rename the grid-mapping variable so it is effectively 'missing' + # (I.E. the var named in "data-variable:grid_mapping" does not exist). # Rules Triggered: # 001 : fc_default # 002 : fc_provides_coordinate_latitude @@ -421,6 +440,8 @@ def test_latlon_bad_latlon_unit(self): # coords-build: # x(lon) is regular latlon with coord-system # y(lat) is a dim-coord, but NO coord-system + # = "fc_provides_coordinate_latitude" does not trigger, because it is + # not a valid latitude coordinate. result = self.run_testcase(latitude_units="degrees") self.check_result(result, latitude_no_cs=True) @@ -449,9 +470,13 @@ def test_mapping_rotated(self): # # All non-latlon coordinate systems ... - # These all have projection-x/y coordinates with units of metres + # These all have projection-x/y coordinates with units of metres. # They all work the same way, except that Mercator/Stereographic have # parameter checking routines that can fail. + # NOTE: various mapping types *require* certain addtional properties + # - without which an error will occur during translation. + # - run_testcase/_make_testcase_cdl know how to provide these + # # Rules Triggered: # 001 : fc_default # 002 : fc_provides_grid_mapping_ From 364ea5bf7fd1bc3ef58d99600108d6811dfdf690 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 2 Jun 2021 15:49:26 +0100 Subject: [PATCH 14/53] Tidy testcase creation a bit. --- .../load_cube/test__load_cube__activate.py | 26 +++++++++++-------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py index cd62c9b481..5cee07678e 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py @@ -98,7 +98,6 @@ def tearDownClass(cls): def _make_testcase_cdl( self, - cdl_path, latitude_units=None, gridmapvar_name=None, gridmapvar_mappropertyname=None, @@ -107,7 +106,7 @@ def _make_testcase_cdl( mapping_scalefactor=None, ): """ - Write a testcase example into a CDL file. + Create a CDL string for a testcase. This is the "master" routine for creating all our testcases. Kwarg options modify a simple default testcase with a latlon grid. @@ -259,21 +258,26 @@ def _make_testcase_cdl( print("File content:") print(cdl_string) print("------\n") - with open(cdl_path, "w") as f_out: - f_out.write(cdl_string) - return cdl_path + return cdl_string - def _load_cube_from_cdl(self, cdl_path, nc_path): + def _load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): """ Load the 'phenom' data variable in a CDL testcase, as a cube. - Using ncgen and the selected _load_cube call. + Using ncgen, CFReader and the _load_cube call. + Can use a genuine Pyke engine, or the actions mimic engine, + selected by `self.use_pyke`. """ - # Create reference netCDF file from reference CDL. 
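        # (Workflow note : the CDL text is written to disk and converted to a
        # real netCDF file with the "ncgen" command-line tool, roughly
        # "ncgen -o test.nc test.cdl", then read back through CFReader.)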
+ # Write the CDL to a file. + with open(cdl_path, "w") as f_out: + f_out.write(cdl_string) + + # Create a netCDF file from the CDL file. command = "ncgen -o {} {}".format(nc_path, cdl_path) subprocess.check_call(command, shell=True) + # Simulate the inner part of the file reading process. cf = CFReader(nc_path) # Grab a data variable : FOR NOW, should be only 1 cf_var = list(cf.cf_group.data_variables.values())[0] @@ -290,15 +294,15 @@ def _load_cube_from_cdl(self, cdl_path, nc_path): def run_testcase(self, **testcase_kwargs): """ - Run a testcase with chosen optionsm returning a test cube. + Run a testcase with chosen options, returning a test cube. The kwargs apply to the '_make_testcase_cdl' method. """ cdl_path = str(self.temp_dirpath / "test.cdl") nc_path = cdl_path.replace(".cdl", ".nc") - self._make_testcase_cdl(cdl_path, **testcase_kwargs) - cube = self._load_cube_from_cdl(cdl_path, nc_path) + cdl_string = self._make_testcase_cdl(**testcase_kwargs) + cube = self._load_cube_from_cdl(cdl_string, cdl_path, nc_path) if self.debug: print("\nCube:") print(cube) From 448796139c2e4cadb1cbc049f12a0bca866fddd2 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 2 Jun 2021 18:20:21 +0100 Subject: [PATCH 15/53] Tests for mapping-types mismatch between coords+grid-mapping; Test nonmonotonic dimcoord values; Test warnings (or not). --- .../load_cube/test__load_cube__activate.py | 452 +++++++++++++++--- 1 file changed, 380 insertions(+), 72 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py index 5cee07678e..4d40370e56 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py @@ -101,9 +101,14 @@ def _make_testcase_cdl( latitude_units=None, gridmapvar_name=None, gridmapvar_mappropertyname=None, - gridmapvar_missingradius=False, - mapping_name=None, + mapping_missingradius=False, + mapping_type_name=None, mapping_scalefactor=None, + yco_values=None, + xco_name=None, + yco_name=None, + xco_units=None, + yco_units=None, ): """ Create a CDL string for a testcase. @@ -123,52 +128,62 @@ def _make_testcase_cdl( # Likewise, an invalid (unrecognised) grid-mapping can be mimicked by # selecting an unkown 'grid_mapping_name' property, with # "gridmapvar_mappropertyname=xxx". - if mapping_name is None: + if mapping_type_name is None: # Default grid-mapping and coords are standard lat-lon. - mapping_name = hh.CF_GRID_MAPPING_LAT_LON - xco_name = hh.CF_VALUE_STD_NAME_LON - yco_name = hh.CF_VALUE_STD_NAME_LAT - xco_units = "degrees_east" + mapping_type_name = hh.CF_GRID_MAPPING_LAT_LON + xco_name_default = hh.CF_VALUE_STD_NAME_LON + yco_name_default = hh.CF_VALUE_STD_NAME_LAT + xco_units_default = "degrees_east" # Special kwarg overrides some of the values. if latitude_units is None: - yco_units = "degrees_north" + yco_units_default = "degrees_north" else: # Override the latitude units (to invalidate). - yco_units = latitude_units + yco_units_default = latitude_units - elif mapping_name == hh.CF_GRID_MAPPING_ROTATED_LAT_LON: + elif mapping_type_name == hh.CF_GRID_MAPPING_ROTATED_LAT_LON: # Rotated lat-lon coordinates. 
- xco_name = hh.CF_VALUE_STD_NAME_GRID_LON - yco_name = hh.CF_VALUE_STD_NAME_GRID_LAT - xco_units = "degrees" - yco_units = "degrees" + xco_name_default = hh.CF_VALUE_STD_NAME_GRID_LON + yco_name_default = hh.CF_VALUE_STD_NAME_GRID_LAT + xco_units_default = "degrees" + yco_units_default = "degrees" else: # General non-latlon coordinates # Exactly which depends on the grid_mapping name. - xco_name = hh.CF_VALUE_STD_NAME_PROJ_X - yco_name = hh.CF_VALUE_STD_NAME_PROJ_Y - xco_units = "m" - yco_units = "m" + xco_name_default = hh.CF_VALUE_STD_NAME_PROJ_X + yco_name_default = hh.CF_VALUE_STD_NAME_PROJ_Y + xco_units_default = "m" + yco_units_default = "m" + + # Options can override coord (standard) names and units. + if xco_name is None: + xco_name = xco_name_default + if yco_name is None: + yco_name = yco_name_default + if xco_units is None: + xco_units = xco_units_default + if yco_units is None: + yco_units = yco_units_default grid_mapping_name = "grid" - # Options can override the gridvar name, and its 'grid+mapping_name' - # property. + # Options can override the gridvar name and properties. g_varname = gridmapvar_name g_mapname = gridmapvar_mappropertyname if g_varname is None: g_varname = grid_mapping_name if g_mapname is None: + # If you change this, it is no longer a valid grid-mapping var. g_mapname = "grid_mapping_name" # Omit the earth radius, if requested. - if gridmapvar_missingradius: + if mapping_missingradius: g_radius_string = "" else: g_radius_string = f"{g_varname}:earth_radius = 6.e6 ;" g_string = f""" int {g_varname} ; - {g_varname}:{g_mapname} = "{mapping_name}"; + {g_varname}:{g_mapname} = "{mapping_type_name}"; {g_radius_string} """ @@ -187,7 +202,7 @@ def _make_testcase_cdl( # # Those which require 'latitude of projection origin' - if mapping_name in ( + if mapping_type_name in ( hh.CF_GRID_MAPPING_TRANSVERSE, hh.CF_GRID_MAPPING_STEREO, hh.CF_GRID_MAPPING_GEOSTATIONARY, @@ -198,7 +213,7 @@ def _make_testcase_cdl( {g_varname}:{latpo_name} = 0.0 ; """ # Those which require 'longitude of projection origin' - if mapping_name in ( + if mapping_type_name in ( hh.CF_GRID_MAPPING_STEREO, hh.CF_GRID_MAPPING_GEOSTATIONARY, hh.CF_GRID_MAPPING_VERTICAL, @@ -208,13 +223,13 @@ def _make_testcase_cdl( {g_varname}:{lonpo_name} = 0.0 ; """ # Those which require 'longitude of central meridian' - if mapping_name in (hh.CF_GRID_MAPPING_TRANSVERSE,): + if mapping_type_name in (hh.CF_GRID_MAPPING_TRANSVERSE,): latcm_name = hh.CF_ATTR_GRID_LON_OF_CENT_MERIDIAN g_string += f""" {g_varname}:{latcm_name} = 0.0 ; """ # Those which require 'perspective point height' - if mapping_name in ( + if mapping_type_name in ( hh.CF_GRID_MAPPING_VERTICAL, hh.CF_GRID_MAPPING_GEOSTATIONARY, ): @@ -223,12 +238,18 @@ def _make_testcase_cdl( {g_varname}:{pph_name} = 600000.0 ; """ # Those which require 'sweep angle axis' - if mapping_name in (hh.CF_GRID_MAPPING_GEOSTATIONARY,): + if mapping_type_name in (hh.CF_GRID_MAPPING_GEOSTATIONARY,): saa_name = hh.CF_ATTR_GRID_SWEEP_ANGLE_AXIS g_string += f""" {g_varname}:{saa_name} = "y" ; """ + # y-coord values + if yco_values is None: + yco_values = [10.0, 20.0] + yco_value_strings = [str(val) for val in yco_values] + yco_values_string = ", ".join(yco_value_strings) + # Construct the total CDL string cdl_string = f""" netcdf test {{ @@ -250,7 +271,7 @@ def _make_testcase_cdl( xco:standard_name = "{xco_name}" ; {g_string} data: - yco = 10., 20. ; + yco = {yco_values_string} ; xco = 100., 110., 120. 
; }} """ @@ -292,7 +313,7 @@ def _load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): # iris.fileformats.netcdf.LOAD_PYKE = False return _load_cube(engine, cf, cf_var, nc_path) - def run_testcase(self, **testcase_kwargs): + def run_testcase(self, warning=None, **testcase_kwargs): """ Run a testcase with chosen options, returning a test cube. @@ -302,7 +323,12 @@ def run_testcase(self, **testcase_kwargs): cdl_path = str(self.temp_dirpath / "test.cdl") nc_path = cdl_path.replace(".cdl", ".nc") cdl_string = self._make_testcase_cdl(**testcase_kwargs) - cube = self._load_cube_from_cdl(cdl_string, cdl_path, nc_path) + if warning is None: + context = self.assertNoWarningsRegexp() + else: + context = self.assertWarnsRegexp(warning) + with context: + cube = self._load_cube_from_cdl(cdl_string, cdl_path, nc_path) if self.debug: print("\nCube:") print(cube) @@ -315,7 +341,11 @@ def check_result( cube_cstype=None, cube_no_cs=False, cube_no_xycoords=False, - latitude_no_cs=False, + xco_no_cs=False, # N.B. no effect if cube_no_cs is True + yco_no_cs=False, # N.B. no effect if cube_no_cs is True + yco_is_aux=False, + xco_stdname=True, + yco_stdname=True, ): """ Check key properties of a result cube. @@ -325,13 +355,18 @@ def check_result( self.assertEqual(cube.standard_name, "air_temperature") self.assertEqual(cube.var_name, "phenom") - x_coords = cube.coords(axis="x") - y_coords = cube.coords(axis="y") - expected_dim_coords = x_coords + y_coords + x_coords = cube.coords(dimensions=(1,)) + y_coords = cube.coords(dimensions=(0,)) + if yco_is_aux: + expected_dim_coords = x_coords + expected_aux_coords = y_coords + else: + expected_dim_coords = x_coords + y_coords + expected_aux_coords = [] + self.assertEqual( set(expected_dim_coords), set(cube.coords(dim_coords=True)) ) - # These are exactly the coords we have. if cube_no_xycoords: self.assertEqual(expected_dim_coords, []) x_coord = None @@ -342,31 +377,54 @@ def check_result( self.assertEqual(len(y_coords), 1) (y_coord,) = y_coords - expected_aux_coords = [] - # These are exactly the coords we have. 
self.assertEqual( set(expected_aux_coords), set(cube.coords(dim_coords=False)) ) + if x_coord: + if xco_stdname is None: + # no check + pass + elif xco_stdname is True: + self.assertIsNotNone(x_coord.standard_name) + elif xco_stdname is False: + self.assertIsNone(x_coord.standard_name) + else: + self.assertEqual(x_coord.standard_name, xco_stdname) + + if y_coord: + if yco_stdname is None: + # no check + pass + if yco_stdname is True: + self.assertIsNotNone(y_coord.standard_name) + elif yco_stdname is False: + self.assertIsNone(y_coord.standard_name) + else: + self.assertEqual(y_coord.standard_name, yco_stdname) + cube_cs = cube.coord_system() if cube_no_xycoords: - lat_cs = None - lon_cs = None + yco_cs = None + xco_cs = None else: - lat_cs = y_coord.coord_system - lon_cs = x_coord.coord_system + yco_cs = y_coord.coord_system + xco_cs = x_coord.coord_system if cube_no_cs: self.assertIsNone(cube_cs) - self.assertIsNone(lat_cs) - self.assertIsNone(lon_cs) + self.assertIsNone(yco_cs) + self.assertIsNone(xco_cs) else: if cube_cstype is not None: self.assertIsInstance(cube_cs, cube_cstype) - self.assertEqual(lon_cs, cube_cs) - if latitude_no_cs: - self.assertIsNone(lat_cs) + if xco_no_cs: + self.assertIsNone(xco_cs) + else: + self.assertEqual(xco_cs, cube_cs) + if yco_no_cs: + self.assertIsNone(yco_cs) else: - self.assertEqual(lat_cs, cube_cs) + self.assertEqual(yco_cs, cube_cs) class Mixin__grid_mapping(Mixin_Test__nc_load_actions): @@ -394,7 +452,7 @@ def test_missing_latlon_radius(self): # N.B. doesn't really test rules-activation, but maybe worth doing. # (no rules trigger) with self.assertRaisesRegex(ValueError, "No ellipsoid"): - self.run_testcase(gridmapvar_missingradius=True) + self.run_testcase(mapping_missingradius=True) def test_bad_gridmapping_nameproperty(self): # Fix the 'grid' var so it does not register as a grid-mapping. @@ -423,8 +481,8 @@ def test_latlon_bad_gridmapping_varname(self): # Notes: # no coord-system # all the same as test_bad_gridmapping_nameproperty - with self.assertWarnsRegexp("Missing.*grid mapping variable 'grid'"): - result = self.run_testcase(gridmapvar_name="grid_2") + warning = "Missing.*grid mapping variable 'grid'" + result = self.run_testcase(warning=warning, gridmapvar_name="grid_2") self.check_result(result, cube_no_cs=True) def test_latlon_bad_latlon_unit(self): @@ -447,7 +505,7 @@ def test_latlon_bad_latlon_unit(self): # = "fc_provides_coordinate_latitude" does not trigger, because it is # not a valid latitude coordinate. 
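        # (The latitude_units kwarg just swaps the y-coord units string in
        # the generated CDL, so the y-coord no longer passes the checks for
        # a true latitude coordinate.)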
result = self.run_testcase(latitude_units="degrees") - self.check_result(result, latitude_no_cs=True) + self.check_result(result, yco_no_cs=True) def test_mapping_rotated(self): # Test with rotated-latlon grid-mapping @@ -468,7 +526,7 @@ def test_mapping_rotated(self): # coords-build: lat+lon coords ROTATED, with coord-system # (rotated means different name + units) result = self.run_testcase( - mapping_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON + mapping_type_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON ) self.check_result(result, cube_cstype=ics.RotatedGeogCS) @@ -494,29 +552,31 @@ def test_mapping_rotated(self): # coords-build: proj-x/-y_, with coord-system def test_mapping_albers(self): - result = self.run_testcase(mapping_name=hh.CF_GRID_MAPPING_ALBERS) + result = self.run_testcase(mapping_type_name=hh.CF_GRID_MAPPING_ALBERS) self.check_result(result, cube_cstype=ics.AlbersEqualArea) def test_mapping_geostationary(self): result = self.run_testcase( - mapping_name=hh.CF_GRID_MAPPING_GEOSTATIONARY + mapping_type_name=hh.CF_GRID_MAPPING_GEOSTATIONARY ) self.check_result(result, cube_cstype=ics.Geostationary) def test_mapping_lambert_azimuthal(self): result = self.run_testcase( - mapping_name=hh.CF_GRID_MAPPING_LAMBERT_AZIMUTHAL + mapping_type_name=hh.CF_GRID_MAPPING_LAMBERT_AZIMUTHAL ) self.check_result(result, cube_cstype=ics.LambertAzimuthalEqualArea) def test_mapping_lambert_conformal(self): result = self.run_testcase( - mapping_name=hh.CF_GRID_MAPPING_LAMBERT_CONFORMAL + mapping_type_name=hh.CF_GRID_MAPPING_LAMBERT_CONFORMAL ) self.check_result(result, cube_cstype=ics.LambertConformal) def test_mapping_mercator(self): - result = self.run_testcase(mapping_name=hh.CF_GRID_MAPPING_MERCATOR) + result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_MERCATOR + ) self.check_result(result, cube_cstype=ics.Mercator) def test_mapping_mercator__fail_unsupported(self): @@ -530,16 +590,17 @@ def test_mapping_mercator__fail_unsupported(self): # coords-build: NONE # = NO coord-system # = NO dim-coords built (cube has no coords) - with self.assertWarnsRegexp("not yet supported for Mercator"): - # Set a non-unity scale factor, which mercator cannot handle. - result = self.run_testcase( - mapping_name=hh.CF_GRID_MAPPING_MERCATOR, - mapping_scalefactor=2.0, - ) + # Set a non-unity scale factor, which mercator cannot handle. + warning = "not yet supported for Mercator" + result = self.run_testcase( + warning=warning, + mapping_type_name=hh.CF_GRID_MAPPING_MERCATOR, + mapping_scalefactor=2.0, + ) self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) def test_mapping_stereographic(self): - result = self.run_testcase(mapping_name=hh.CF_GRID_MAPPING_STEREO) + result = self.run_testcase(mapping_type_name=hh.CF_GRID_MAPPING_STEREO) self.check_result(result, cube_cstype=ics.Stereographic) def test_mapping_stereographic__fail_unsupported(self): @@ -550,22 +611,239 @@ def test_mapping_stereographic__fail_unsupported(self): # Notes: # as for 'mercator__fail_unsupported', above # = NO dim-coords built (cube has no coords) - with self.assertWarnsRegexp("not yet supported for stereographic"): - # Set a non-unity scale factor, which stereo cannot handle. - result = self.run_testcase( - mapping_name=hh.CF_GRID_MAPPING_STEREO, - mapping_scalefactor=2.0, - ) + # + # Set a non-unity scale factor, which stereo cannot handle. 
+ warning = "not yet supported for stereographic" + result = self.run_testcase( + warning=warning, + mapping_type_name=hh.CF_GRID_MAPPING_STEREO, + mapping_scalefactor=2.0, + ) self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) def test_mapping_transverse_mercator(self): - result = self.run_testcase(mapping_name=hh.CF_GRID_MAPPING_TRANSVERSE) + result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_TRANSVERSE + ) self.check_result(result, cube_cstype=ics.TransverseMercator) def test_mapping_vertical_perspective(self): - result = self.run_testcase(mapping_name=hh.CF_GRID_MAPPING_VERTICAL) + result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_VERTICAL + ) self.check_result(result, cube_cstype=ics.VerticalPerspective) + def test_mapping_unsupported(self): + # Use azimuthal, which is a real thing but we don't yet support it. + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_projection_x_coordinate + # 003 : fc_provides_projection_y_coordinate + # NOTES: + # - there is no warning for this. + # TODO: perhaps there should be ? + result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_AZIMUTHAL + ) + self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) + + def test_mapping_undefined(self): + # Use a random, unknown "mapping type". + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_projection_x_coordinate + # 003 : fc_provides_projection_y_coordinate + # NOTES: + # - there is no warning for this. + # TODO: perhaps there should be ? + result = self.run_testcase(mapping_type_name="unknown") + self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) + + # + # Cases where names(+units) of coords don't match the grid-mapping type + # Effectively, there are 9 possibilities for (latlon/rotated/projected) + # coords against (latlon/rotated/projected/missing) coord-systems. + # N.B. the results are not all the same ... + # + + def test_mapping__mismatch__latlon_coords_rotated_system(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_rotated_latitude_longitude + # 003 : fc_provides_coordinate_latitude + # 004 : fc_provides_coordinate_longitude + # NOTES: + # no build_coord triggers, as it requires the correct mapping type + # so no dim-coords at all in this case + result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON, + xco_name="longitude", + xco_units="degrees_east", + yco_name="latitude", + yco_units="degrees_north", + ) + self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) + + def test_mapping__mismatch__latlon_coords_nonll_system(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_albers_equal_area + # 003 : fc_provides_coordinate_latitude + # 004 : fc_provides_coordinate_longitude + # 005 : fc_build_coordinate_latitude_nocs + # 006 : fc_build_coordinate_longitude_nocs + # NOTES: + # build_coord_XXX_cs triggers, requires NO latlon/rotated mapping + # - but a non-ll mapping is 'ok'. + # TODO: not really clear why this is right ? 
+ result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_ALBERS, + xco_name="longitude", + xco_units="degrees_east", + yco_name="latitude", + yco_units="degrees_north", + ) + self.check_result(result, cube_no_cs=True) + + def test_mapping__mismatch__latlon_coords_missing_system(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_coordinate_latitude + # 003 : fc_provides_coordinate_longitude + # 004 : fc_build_coordinate_latitude_nocs + # 005 : fc_build_coordinate_longitude_nocs + # NOTES: + # same as nonll, except *NO* grid-mapping is detected, + # - which makes no practical difference + warning = "Missing.*grid mapping variable 'grid'" + result = self.run_testcase( + warning=warning, + gridmapvar_name="moved", + xco_name="longitude", + xco_units="degrees_east", + yco_name="latitude", + yco_units="degrees_north", + ) + self.check_result(result, cube_no_cs=True) + + def test_mapping__mismatch__rotated_coords_latlon_system(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_latitude_longitude + # 003 : fc_provides_coordinate_latitude + # 004 : fc_provides_coordinate_longitude + # NOTES: + # no build_coord triggers : requires NO latlon/rotated mapping + # hence no coords at all + result = self.run_testcase( + xco_name="grid_longitude", + xco_units="degrees", + yco_name="grid_latitude", + yco_units="degrees", + ) + self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) + + def test_mapping__mismatch__rotated_coords_nonll_system(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_albers_equal_area + # 003 : fc_provides_coordinate_latitude + # 004 : fc_provides_coordinate_longitude + # 005 : fc_build_coordinate_latitude_nocs + # 006 : fc_build_coordinate_longitude_nocs + # NOTES: + # this is different from the previous + # build_coord.._nocs triggers : requires NO latlon/rotated mapping + # - which seems odd + inconsistent (with previous) ? + # TODO: should this change ?? 
+ result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_ALBERS, + xco_name="grid_longitude", + xco_units="degrees", + yco_name="grid_latitude", + yco_units="degrees", + ) + self.check_result(result, cube_no_cs=True) + + def test_mapping__mismatch__rotated_coords_missing_system(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_coordinate_latitude + # 003 : fc_provides_coordinate_longitude + # 004 : fc_build_coordinate_latitude_nocs + # 005 : fc_build_coordinate_longitude_nocs + # NOTES: + # as previous, but no grid-mapping (which makes no difference) + warning = "Missing.*grid mapping variable 'grid'" + result = self.run_testcase( + warning=warning, + gridmapvar_name="moved", + xco_name="grid_longitude", + xco_units="degrees", + yco_name="grid_latitude", + yco_units="degrees", + ) + self.check_result(result, cube_no_cs=True) + + def test_mapping__mismatch__nonll_coords_latlon_system(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_latitude_longitude + # 003 : fc_default_coordinate + # 004 : fc_default_coordinate + # NOTES: + # dim-coords built as "defaults" : dim-coords, but NO standard name + result = self.run_testcase( + xco_name="projection_x", + xco_units="m", + yco_name="projection_y", + yco_units="m", + ) + self.check_result( + result, cube_no_cs=True, xco_stdname=False, yco_stdname=False + ) + + def test_mapping__mismatch__nonll_coords_rotated_system(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_rotated_latitude_longitude + # 003 : fc_default_coordinate + # 004 : fc_default_coordinate + # NOTES: + # same as previous __mismatch__nonll_ + result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON, + xco_name="projection_x", + xco_units="m", + yco_name="projection_y", + yco_units="m", + ) + self.check_result( + result, cube_no_cs=True, xco_stdname=False, yco_stdname=False + ) + + def test_mapping__mismatch__nonll_coords_missing_system(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_default_coordinate + # 003 : fc_default_coordinate + # NOTES: + # effectively, just like previous 2 __mismatch__nonll_ + warning = "Missing.*grid mapping variable 'grid'" + result = self.run_testcase( + warning=warning, + gridmapvar_name="moved", + xco_name="projection_x", + xco_units="m", + yco_name="projection_y", + yco_units="m", + ) + self.check_result( + result, cube_no_cs=True, xco_stdname=False, yco_stdname=False + ) + class Test__grid_mapping__pyke_rules(Mixin__grid_mapping, tests.IrisTest): # Run grid-mapping tests with Pyke (rules) @@ -597,5 +875,35 @@ def tearDownClass(cls): super().tearDownClass() +class Test__additional(Mixin_Test__nc_load_actions, tests.IrisTest): + # Run grid-mapping tests with non-Pyke (actions) + use_pyke = False + + @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + + def test_nondim_lats(self): + # Check what happens when values don't allow a coord to be dim-coord. + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_(latitude_longitude) + # 003 : fc_provides_coordinate_(latitude) + # 004 : fc_provides_coordinate_(longitude) + # 005 : fc_build_coordinate_(latitude) + # 006 : fc_build_coordinate_(longitude) + # NOTES: + # in terms of rule triggers, this is not distinct from a normal case + # - but the latitude is now an aux-coord. 
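        # (Iris dim-coords must be strictly monotonic, so the repeated
        # y-values can only be loaded as an aux-coord, hence the warning.)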
+ warning = "must be.* monotonic" + result = self.run_testcase(warning=warning, yco_values=[0.0, 0.0]) + self.check_result(result, yco_is_aux=True) + + if __name__ == "__main__": tests.main() From 3f6e136f2119a7b0c88c5dc130998a054b7dc1e9 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 2 Jun 2021 18:51:17 +0100 Subject: [PATCH 16/53] Refactor to put different test areas in their own sourcefiles. --- .../load_cube/load_cube__activate/__init__.py | 425 +++++++++++++++++ .../load_cube__activate/test__additional.py | 56 +++ .../test__grid_mappings.py} | 447 +----------------- 3 files changed, 486 insertions(+), 442 deletions(-) create mode 100644 lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py create mode 100644 lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__additional.py rename lib/iris/tests/unit/fileformats/netcdf/load_cube/{test__load_cube__activate.py => load_cube__activate/test__grid_mappings.py} (53%) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py new file mode 100644 index 0000000000..6d6c1a92f4 --- /dev/null +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py @@ -0,0 +1,425 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the engine.activate() call within the +`iris.fileformats.netcdf._load_cube` function. + +For now, these tests are designed to function with **either** the "old" +Pyke-rules implementation in :mod:`iris.fileformats._pyke_rules`, **or** the +"new" :mod:`iris.fileformats._nc_load_rules`. +Both of those supply an "engine" with an "activate" method + -- at least for now : may be simplified in future. + +""" +from pathlib import Path +import shutil +import subprocess +import tempfile + +from iris.fileformats.cf import CFReader +import iris.fileformats.netcdf +from iris.fileformats.netcdf import _load_cube +import iris.fileformats._nc_load_rules.engine +import iris.fileformats._nc_load_rules.helpers as hh + +""" +Notes on testing method. + +IN cf : "def _load_cube(engine, cf, cf_var, filename)" +WHERE: + - engine is a :class:`pyke.knowledge_engine.engine` + -- **OR** :class:`iris.fileformats._nc_load_rules.engine.Engine` + - cf is a CFReader + - cf_var is a CFDAtaVariable + +As it's hard to construct a suitable CFReader from scratch, it would seem +simpler (for now) to use an ACTUAL FILE. +Likewise, the easiest approach to that is with CDL and "ncgen". +To do this, we need a test "fixture" that can create suitable test files in a +temporary directory. + +""" + + +class Mixin__nc_load_actions: + """ + Class to make testcases for rules or actions code, and check results. + + Defines standard setUpClass/tearDownClass methods, to create a temporary + directory for intermediate files. + NOTE: owing to peculiarities of unittest, these must be explicitly called + from a setUpClass/tearDownClass within the 'final' inheritor, i.e. the + actual Test_XXX class which also inherits unittest.TestCase. + + Testcases are manufactured by the '_make_testcase_cdl' method. + These are based on a 'standard simple latlon grid' example. + Various kwargs control variations on this. 
+ + The 'run_testcase' method takes the '_make_testcase_cdl' kwargs and makes + a result cube (by: producing cdl, converting to netcdf, and loading). + + The 'check_result' method performs various checks on the result, with + kwargs controlling the expected properties to be tested against. + This usage is *also* based on the 'standard simple latlon grid' example, + the kwargs specify expected differences from that. + + Can also test with either the Pyke(rules) or non-Pyke (actions) + implementations (for now). + + """ + + # + # "global" test settings + # + + # whether to test 'rules' or 'actions' implementations + # TODO: remove when Pyke is gone + use_pyke = True + + # whether to output various debug info + # TODO: ?possibly? remove when development is complete + debug = False + + @classmethod + def setUpClass(cls): + # # Control which testing method we are applying. + # Create a temp directory for temp files. + cls.temp_dirpath = Path(tempfile.mkdtemp()) + + @classmethod + def tearDownClass(cls): + # Destroy a temp directory for temp files. + shutil.rmtree(cls.temp_dirpath) + + def _make_testcase_cdl( + self, + latitude_units=None, + gridmapvar_name=None, + gridmapvar_mappropertyname=None, + mapping_missingradius=False, + mapping_type_name=None, + mapping_scalefactor=None, + yco_values=None, + xco_name=None, + yco_name=None, + xco_units=None, + yco_units=None, + ): + """ + Create a CDL string for a testcase. + + This is the "master" routine for creating all our testcases. + Kwarg options modify a simple default testcase with a latlon grid. + The routine handles the various testcase options and their possible + interactions. This includes knowing what extra changes are required + to support different grid-mapping types (for example). + + """ + # The grid-mapping options are standard-latlon, rotated, or non-latlon. + # This affects names+units of the X and Y coords. + # We don't have an option to *not* include a grid-mapping variable, but + # we can mimic a missing grid-mapping by changing the varname from that + # which the data-variable refers to, with "gridmapvar_name=xxx". + # Likewise, an invalid (unrecognised) grid-mapping can be mimicked by + # selecting an unkown 'grid_mapping_name' property, with + # "gridmapvar_mappropertyname=xxx". + if mapping_type_name is None: + # Default grid-mapping and coords are standard lat-lon. + mapping_type_name = hh.CF_GRID_MAPPING_LAT_LON + xco_name_default = hh.CF_VALUE_STD_NAME_LON + yco_name_default = hh.CF_VALUE_STD_NAME_LAT + xco_units_default = "degrees_east" + # Special kwarg overrides some of the values. + if latitude_units is None: + yco_units_default = "degrees_north" + else: + # Override the latitude units (to invalidate). + yco_units_default = latitude_units + + elif mapping_type_name == hh.CF_GRID_MAPPING_ROTATED_LAT_LON: + # Rotated lat-lon coordinates. + xco_name_default = hh.CF_VALUE_STD_NAME_GRID_LON + yco_name_default = hh.CF_VALUE_STD_NAME_GRID_LAT + xco_units_default = "degrees" + yco_units_default = "degrees" + + else: + # General non-latlon coordinates + # Exactly which depends on the grid_mapping name. + xco_name_default = hh.CF_VALUE_STD_NAME_PROJ_X + yco_name_default = hh.CF_VALUE_STD_NAME_PROJ_Y + xco_units_default = "m" + yco_units_default = "m" + + # Options can override coord (standard) names and units. 
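        # (These overrides support the "mismatch" testcases, i.e. coord names
        # + units from one family combined with a grid-mapping of another.)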
+ if xco_name is None: + xco_name = xco_name_default + if yco_name is None: + yco_name = yco_name_default + if xco_units is None: + xco_units = xco_units_default + if yco_units is None: + yco_units = yco_units_default + + grid_mapping_name = "grid" + # Options can override the gridvar name and properties. + g_varname = gridmapvar_name + g_mapname = gridmapvar_mappropertyname + if g_varname is None: + g_varname = grid_mapping_name + if g_mapname is None: + # If you change this, it is no longer a valid grid-mapping var. + g_mapname = "grid_mapping_name" + + # Omit the earth radius, if requested. + if mapping_missingradius: + g_radius_string = "" + else: + g_radius_string = f"{g_varname}:earth_radius = 6.e6 ;" + g_string = f""" + int {g_varname} ; + {g_varname}:{g_mapname} = "{mapping_type_name}"; + {g_radius_string} + """ + + # Add a specified scale-factor, if requested. + if mapping_scalefactor is not None: + # Add a specific scale-factor term to the grid mapping. + # (Non-unity scale is not supported for Mercator/Stereographic). + sfapo_name = hh.CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN + g_string += f""" + {g_varname}:{sfapo_name} = {mapping_scalefactor} ; + """ + + # + # Add various additional (minimal) required properties for different + # grid mapping types. + # + + # Those which require 'latitude of projection origin' + if mapping_type_name in ( + hh.CF_GRID_MAPPING_TRANSVERSE, + hh.CF_GRID_MAPPING_STEREO, + hh.CF_GRID_MAPPING_GEOSTATIONARY, + hh.CF_GRID_MAPPING_VERTICAL, + ): + latpo_name = hh.CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN + g_string += f""" + {g_varname}:{latpo_name} = 0.0 ; + """ + # Those which require 'longitude of projection origin' + if mapping_type_name in ( + hh.CF_GRID_MAPPING_STEREO, + hh.CF_GRID_MAPPING_GEOSTATIONARY, + hh.CF_GRID_MAPPING_VERTICAL, + ): + lonpo_name = hh.CF_ATTR_GRID_LON_OF_PROJ_ORIGIN + g_string += f""" + {g_varname}:{lonpo_name} = 0.0 ; + """ + # Those which require 'longitude of central meridian' + if mapping_type_name in (hh.CF_GRID_MAPPING_TRANSVERSE,): + latcm_name = hh.CF_ATTR_GRID_LON_OF_CENT_MERIDIAN + g_string += f""" + {g_varname}:{latcm_name} = 0.0 ; + """ + # Those which require 'perspective point height' + if mapping_type_name in ( + hh.CF_GRID_MAPPING_VERTICAL, + hh.CF_GRID_MAPPING_GEOSTATIONARY, + ): + pph_name = hh.CF_ATTR_GRID_PERSPECTIVE_HEIGHT + g_string += f""" + {g_varname}:{pph_name} = 600000.0 ; + """ + # Those which require 'sweep angle axis' + if mapping_type_name in (hh.CF_GRID_MAPPING_GEOSTATIONARY,): + saa_name = hh.CF_ATTR_GRID_SWEEP_ANGLE_AXIS + g_string += f""" + {g_varname}:{saa_name} = "y" ; + """ + + # y-coord values + if yco_values is None: + yco_values = [10.0, 20.0] + yco_value_strings = [str(val) for val in yco_values] + yco_values_string = ", ".join(yco_value_strings) + + # Construct the total CDL string + cdl_string = f""" + netcdf test {{ + dimensions: + yco = 2 ; + xco = 3 ; + variables: + double phenom(yco, xco) ; + phenom:standard_name = "air_temperature" ; + phenom:units = "K" ; + phenom:grid_mapping = "grid" ; + double yco(yco) ; + yco:axis = "Y" ; + yco:units = "{yco_units}" ; + yco:standard_name = "{yco_name}" ; + double xco(xco) ; + xco:axis = "X" ; + xco:units = "{xco_units}" ; + xco:standard_name = "{xco_name}" ; + {g_string} + data: + yco = {yco_values_string} ; + xco = 100., 110., 120. 
; + }} + """ + if self.debug: + print("File content:") + print(cdl_string) + print("------\n") + return cdl_string + + def _load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): + """ + Load the 'phenom' data variable in a CDL testcase, as a cube. + + Using ncgen, CFReader and the _load_cube call. + Can use a genuine Pyke engine, or the actions mimic engine, + selected by `self.use_pyke`. + + """ + # Write the CDL to a file. + with open(cdl_path, "w") as f_out: + f_out.write(cdl_string) + + # Create a netCDF file from the CDL file. + command = "ncgen -o {} {}".format(nc_path, cdl_path) + subprocess.check_call(command, shell=True) + + # Simulate the inner part of the file reading process. + cf = CFReader(nc_path) + # Grab a data variable : FOR NOW, should be only 1 + cf_var = list(cf.cf_group.data_variables.values())[0] + cf_var = cf.cf_group.data_variables["phenom"] + + if self.use_pyke: + engine = iris.fileformats.netcdf._pyke_kb_engine_real() + else: + engine = iris.fileformats._nc_load_rules.engine.Engine() + + iris.fileformats.netcdf.DEBUG = self.debug + # iris.fileformats.netcdf.LOAD_PYKE = False + return _load_cube(engine, cf, cf_var, nc_path) + + def run_testcase(self, warning=None, **testcase_kwargs): + """ + Run a testcase with chosen options, returning a test cube. + + The kwargs apply to the '_make_testcase_cdl' method. + + """ + cdl_path = str(self.temp_dirpath / "test.cdl") + nc_path = cdl_path.replace(".cdl", ".nc") + cdl_string = self._make_testcase_cdl(**testcase_kwargs) + if warning is None: + context = self.assertNoWarningsRegexp() + else: + context = self.assertWarnsRegexp(warning) + with context: + cube = self._load_cube_from_cdl(cdl_string, cdl_path, nc_path) + if self.debug: + print("\nCube:") + print(cube) + print("") + return cube + + def check_result( + self, + cube, + cube_cstype=None, + cube_no_cs=False, + cube_no_xycoords=False, + xco_no_cs=False, # N.B. no effect if cube_no_cs is True + yco_no_cs=False, # N.B. no effect if cube_no_cs is True + yco_is_aux=False, + xco_stdname=True, + yco_stdname=True, + ): + """ + Check key properties of a result cube. + + Various options control the expected things which are tested. 
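        For example, "check_result(cube, cube_no_cs=True, cube_no_xycoords=True)"
        asserts that the cube has no coord-system and that no x/y coords were
        built at all (as in the unsupported-projection testcases).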
+ """ + self.assertEqual(cube.standard_name, "air_temperature") + self.assertEqual(cube.var_name, "phenom") + + x_coords = cube.coords(dimensions=(1,)) + y_coords = cube.coords(dimensions=(0,)) + if yco_is_aux: + expected_dim_coords = x_coords + expected_aux_coords = y_coords + else: + expected_dim_coords = x_coords + y_coords + expected_aux_coords = [] + + self.assertEqual( + set(expected_dim_coords), set(cube.coords(dim_coords=True)) + ) + if cube_no_xycoords: + self.assertEqual(expected_dim_coords, []) + x_coord = None + y_coord = None + else: + self.assertEqual(len(x_coords), 1) + (x_coord,) = x_coords + self.assertEqual(len(y_coords), 1) + (y_coord,) = y_coords + + self.assertEqual( + set(expected_aux_coords), set(cube.coords(dim_coords=False)) + ) + + if x_coord: + if xco_stdname is None: + # no check + pass + elif xco_stdname is True: + self.assertIsNotNone(x_coord.standard_name) + elif xco_stdname is False: + self.assertIsNone(x_coord.standard_name) + else: + self.assertEqual(x_coord.standard_name, xco_stdname) + + if y_coord: + if yco_stdname is None: + # no check + pass + if yco_stdname is True: + self.assertIsNotNone(y_coord.standard_name) + elif yco_stdname is False: + self.assertIsNone(y_coord.standard_name) + else: + self.assertEqual(y_coord.standard_name, yco_stdname) + + cube_cs = cube.coord_system() + if cube_no_xycoords: + yco_cs = None + xco_cs = None + else: + yco_cs = y_coord.coord_system + xco_cs = x_coord.coord_system + if cube_no_cs: + self.assertIsNone(cube_cs) + self.assertIsNone(yco_cs) + self.assertIsNone(xco_cs) + else: + if cube_cstype is not None: + self.assertIsInstance(cube_cs, cube_cstype) + if xco_no_cs: + self.assertIsNone(xco_cs) + else: + self.assertEqual(xco_cs, cube_cs) + if yco_no_cs: + self.assertIsNone(yco_cs) + else: + self.assertEqual(yco_cs, cube_cs) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__additional.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__additional.py new file mode 100644 index 0000000000..0678e3b307 --- /dev/null +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__additional.py @@ -0,0 +1,56 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the engine.activate() call within the +`iris.fileformats.netcdf._load_cube` function. + +For now, these tests are designed to function with **either** the "old" +Pyke-rules implementation in :mod:`iris.fileformats._pyke_rules`, **or** the +"new" :mod:`iris.fileformats._nc_load_rules`. +Both of those supply an "engine" with an "activate" method + -- at least for now : may be simplified in future. + +""" +import iris.tests as tests + + +from iris.tests.unit.fileformats.netcdf.load_cube.load_cube__activate import ( + Mixin__nc_load_actions, +) + + +class Test__additional(Mixin__nc_load_actions, tests.IrisTest): + # Run grid-mapping tests with non-Pyke (actions) + use_pyke = False + + @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + + def test_nondim_lats(self): + # Check what happens when values don't allow a coord to be dim-coord. 
+ # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_(latitude_longitude) + # 003 : fc_provides_coordinate_(latitude) + # 004 : fc_provides_coordinate_(longitude) + # 005 : fc_build_coordinate_(latitude) + # 006 : fc_build_coordinate_(longitude) + # NOTES: + # in terms of rule triggers, this is not distinct from a normal case + # - but the latitude is now an aux-coord. + warning = "must be.* monotonic" + result = self.run_testcase(warning=warning, yco_values=[0.0, 0.0]) + self.check_result(result, yco_is_aux=True) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py similarity index 53% rename from lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py rename to lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py index 4d40370e56..ac495ac910 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py @@ -7,427 +7,20 @@ Unit tests for the engine.activate() call within the `iris.fileformats.netcdf._load_cube` function. -For now, these tests are designed to function with **either** the "old" -Pyke-rules implementation in :mod:`iris.fileformats._pyke_rules`, **or** the -"new" :mod:`iris.fileformats._nc_load_rules`. -Both of those supply an "activate" call (for now : may be simplified in future). +Here, *specifically* testcases relating to grid-mappings and dim-coords. """ import iris.tests as tests -from pathlib import Path -import shutil -import subprocess -import tempfile - import iris.coord_systems as ics -from iris.fileformats.cf import CFReader -import iris.fileformats.netcdf -from iris.fileformats.netcdf import _load_cube -import iris.fileformats._nc_load_rules.engine import iris.fileformats._nc_load_rules.helpers as hh -""" -Notes on testing method. - -IN cf : "def _load_cube(engine, cf, cf_var, filename)" -WHERE: - - engine is a :class:`pyke.knowledge_engine.engine` - -- **OR** :class:`iris.fileformats._nc_load_rules.engine.Engine` - - cf is a CFReader - - cf_var is a CFDAtaVariable - -As it's hard to construct a suitable CFReader from scratch, it would seem -simpler (for now) to use an ACTUAL FILE. -Likewise, the easiest approach to that is with CDL and "ncgen". -To do this, we need a test "fixture" that can create suitable test files in a -temporary directory. - -""" - - -class Mixin_Test__nc_load_actions: - """ - Class to make testcases for rules or actions code, and check results. - - Defines standard setUpClass/tearDownClass methods, to create a temporary - directory for intermediate files. - NOTE: owing to peculiarities of unittest, these must be explicitly called - from a setUpClass/tearDownClass within the 'final' inheritor, i.e. the - actual Test_XXX class which also inherits unittest.TestCase. - - Testcases are manufactured by the '_make_testcase_cdl' method. - These are based on a 'standard simple latlon grid' example. - Various kwargs control variations on this. - - The 'run_testcase' method takes the '_make_testcase_cdl' kwargs and makes - a result cube (by: producing cdl, converting to netcdf, and loading). - - The 'check_result' method performs various checks on the result, with - kwargs controlling the expected properties to be tested against. 
- This usage is *also* based on the 'standard simple latlon grid' example, - the kwargs specify expected differences from that. - - Can also test with either the Pyke(rules) or non-Pyke (actions) - implementations (for now). - - """ - - # - # "global" test settings - # - - # whether to test 'rules' or 'actions' implementations - # TODO: remove when Pyke is gone - use_pyke = True - - # whether to output various debug info - # TODO: ?possibly? remove when development is complete - debug = False - - @classmethod - def setUpClass(cls): - # # Control which testing method we are applying. - # Create a temp directory for temp files. - cls.temp_dirpath = Path(tempfile.mkdtemp()) - - @classmethod - def tearDownClass(cls): - # Destroy a temp directory for temp files. - shutil.rmtree(cls.temp_dirpath) - - def _make_testcase_cdl( - self, - latitude_units=None, - gridmapvar_name=None, - gridmapvar_mappropertyname=None, - mapping_missingradius=False, - mapping_type_name=None, - mapping_scalefactor=None, - yco_values=None, - xco_name=None, - yco_name=None, - xco_units=None, - yco_units=None, - ): - """ - Create a CDL string for a testcase. - - This is the "master" routine for creating all our testcases. - Kwarg options modify a simple default testcase with a latlon grid. - The routine handles the various testcase options and their possible - interactions. This includes knowing what extra changes are required - to support different grid-mapping types (for example). - - """ - # The grid-mapping options are standard-latlon, rotated, or non-latlon. - # This affects names+units of the X and Y coords. - # We don't have an option to *not* include a grid-mapping variable, but - # we can mimic a missing grid-mapping by changing the varname from that - # which the data-variable refers to, with "gridmapvar_name=xxx". - # Likewise, an invalid (unrecognised) grid-mapping can be mimicked by - # selecting an unkown 'grid_mapping_name' property, with - # "gridmapvar_mappropertyname=xxx". - if mapping_type_name is None: - # Default grid-mapping and coords are standard lat-lon. - mapping_type_name = hh.CF_GRID_MAPPING_LAT_LON - xco_name_default = hh.CF_VALUE_STD_NAME_LON - yco_name_default = hh.CF_VALUE_STD_NAME_LAT - xco_units_default = "degrees_east" - # Special kwarg overrides some of the values. - if latitude_units is None: - yco_units_default = "degrees_north" - else: - # Override the latitude units (to invalidate). - yco_units_default = latitude_units - - elif mapping_type_name == hh.CF_GRID_MAPPING_ROTATED_LAT_LON: - # Rotated lat-lon coordinates. - xco_name_default = hh.CF_VALUE_STD_NAME_GRID_LON - yco_name_default = hh.CF_VALUE_STD_NAME_GRID_LAT - xco_units_default = "degrees" - yco_units_default = "degrees" - - else: - # General non-latlon coordinates - # Exactly which depends on the grid_mapping name. - xco_name_default = hh.CF_VALUE_STD_NAME_PROJ_X - yco_name_default = hh.CF_VALUE_STD_NAME_PROJ_Y - xco_units_default = "m" - yco_units_default = "m" - - # Options can override coord (standard) names and units. - if xco_name is None: - xco_name = xco_name_default - if yco_name is None: - yco_name = yco_name_default - if xco_units is None: - xco_units = xco_units_default - if yco_units is None: - yco_units = yco_units_default - - grid_mapping_name = "grid" - # Options can override the gridvar name and properties. 
- g_varname = gridmapvar_name - g_mapname = gridmapvar_mappropertyname - if g_varname is None: - g_varname = grid_mapping_name - if g_mapname is None: - # If you change this, it is no longer a valid grid-mapping var. - g_mapname = "grid_mapping_name" - - # Omit the earth radius, if requested. - if mapping_missingradius: - g_radius_string = "" - else: - g_radius_string = f"{g_varname}:earth_radius = 6.e6 ;" - g_string = f""" - int {g_varname} ; - {g_varname}:{g_mapname} = "{mapping_type_name}"; - {g_radius_string} - """ - - # Add a specified scale-factor, if requested. - if mapping_scalefactor is not None: - # Add a specific scale-factor term to the grid mapping. - # (Non-unity scale is not supported for Mercator/Stereographic). - sfapo_name = hh.CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN - g_string += f""" - {g_varname}:{sfapo_name} = {mapping_scalefactor} ; - """ - - # - # Add various additional (minimal) required properties for different - # grid mapping types. - # +from iris.tests.unit.fileformats.netcdf.load_cube.load_cube__activate import ( + Mixin__nc_load_actions, +) - # Those which require 'latitude of projection origin' - if mapping_type_name in ( - hh.CF_GRID_MAPPING_TRANSVERSE, - hh.CF_GRID_MAPPING_STEREO, - hh.CF_GRID_MAPPING_GEOSTATIONARY, - hh.CF_GRID_MAPPING_VERTICAL, - ): - latpo_name = hh.CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN - g_string += f""" - {g_varname}:{latpo_name} = 0.0 ; - """ - # Those which require 'longitude of projection origin' - if mapping_type_name in ( - hh.CF_GRID_MAPPING_STEREO, - hh.CF_GRID_MAPPING_GEOSTATIONARY, - hh.CF_GRID_MAPPING_VERTICAL, - ): - lonpo_name = hh.CF_ATTR_GRID_LON_OF_PROJ_ORIGIN - g_string += f""" - {g_varname}:{lonpo_name} = 0.0 ; - """ - # Those which require 'longitude of central meridian' - if mapping_type_name in (hh.CF_GRID_MAPPING_TRANSVERSE,): - latcm_name = hh.CF_ATTR_GRID_LON_OF_CENT_MERIDIAN - g_string += f""" - {g_varname}:{latcm_name} = 0.0 ; - """ - # Those which require 'perspective point height' - if mapping_type_name in ( - hh.CF_GRID_MAPPING_VERTICAL, - hh.CF_GRID_MAPPING_GEOSTATIONARY, - ): - pph_name = hh.CF_ATTR_GRID_PERSPECTIVE_HEIGHT - g_string += f""" - {g_varname}:{pph_name} = 600000.0 ; - """ - # Those which require 'sweep angle axis' - if mapping_type_name in (hh.CF_GRID_MAPPING_GEOSTATIONARY,): - saa_name = hh.CF_ATTR_GRID_SWEEP_ANGLE_AXIS - g_string += f""" - {g_varname}:{saa_name} = "y" ; - """ - - # y-coord values - if yco_values is None: - yco_values = [10.0, 20.0] - yco_value_strings = [str(val) for val in yco_values] - yco_values_string = ", ".join(yco_value_strings) - - # Construct the total CDL string - cdl_string = f""" - netcdf test {{ - dimensions: - yco = 2 ; - xco = 3 ; - variables: - double phenom(yco, xco) ; - phenom:standard_name = "air_temperature" ; - phenom:units = "K" ; - phenom:grid_mapping = "grid" ; - double yco(yco) ; - yco:axis = "Y" ; - yco:units = "{yco_units}" ; - yco:standard_name = "{yco_name}" ; - double xco(xco) ; - xco:axis = "X" ; - xco:units = "{xco_units}" ; - xco:standard_name = "{xco_name}" ; - {g_string} - data: - yco = {yco_values_string} ; - xco = 100., 110., 120. ; - }} - """ - if self.debug: - print("File content:") - print(cdl_string) - print("------\n") - return cdl_string - - def _load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): - """ - Load the 'phenom' data variable in a CDL testcase, as a cube. - - Using ncgen, CFReader and the _load_cube call. - Can use a genuine Pyke engine, or the actions mimic engine, - selected by `self.use_pyke`. 
- - """ - # Write the CDL to a file. - with open(cdl_path, "w") as f_out: - f_out.write(cdl_string) - - # Create a netCDF file from the CDL file. - command = "ncgen -o {} {}".format(nc_path, cdl_path) - subprocess.check_call(command, shell=True) - - # Simulate the inner part of the file reading process. - cf = CFReader(nc_path) - # Grab a data variable : FOR NOW, should be only 1 - cf_var = list(cf.cf_group.data_variables.values())[0] - cf_var = cf.cf_group.data_variables["phenom"] - - if self.use_pyke: - engine = iris.fileformats.netcdf._pyke_kb_engine_real() - else: - engine = iris.fileformats._nc_load_rules.engine.Engine() - - iris.fileformats.netcdf.DEBUG = self.debug - # iris.fileformats.netcdf.LOAD_PYKE = False - return _load_cube(engine, cf, cf_var, nc_path) - - def run_testcase(self, warning=None, **testcase_kwargs): - """ - Run a testcase with chosen options, returning a test cube. - - The kwargs apply to the '_make_testcase_cdl' method. - - """ - cdl_path = str(self.temp_dirpath / "test.cdl") - nc_path = cdl_path.replace(".cdl", ".nc") - cdl_string = self._make_testcase_cdl(**testcase_kwargs) - if warning is None: - context = self.assertNoWarningsRegexp() - else: - context = self.assertWarnsRegexp(warning) - with context: - cube = self._load_cube_from_cdl(cdl_string, cdl_path, nc_path) - if self.debug: - print("\nCube:") - print(cube) - print("") - return cube - - def check_result( - self, - cube, - cube_cstype=None, - cube_no_cs=False, - cube_no_xycoords=False, - xco_no_cs=False, # N.B. no effect if cube_no_cs is True - yco_no_cs=False, # N.B. no effect if cube_no_cs is True - yco_is_aux=False, - xco_stdname=True, - yco_stdname=True, - ): - """ - Check key properties of a result cube. - - Various options control the expected things which are tested. 
- """ - self.assertEqual(cube.standard_name, "air_temperature") - self.assertEqual(cube.var_name, "phenom") - - x_coords = cube.coords(dimensions=(1,)) - y_coords = cube.coords(dimensions=(0,)) - if yco_is_aux: - expected_dim_coords = x_coords - expected_aux_coords = y_coords - else: - expected_dim_coords = x_coords + y_coords - expected_aux_coords = [] - - self.assertEqual( - set(expected_dim_coords), set(cube.coords(dim_coords=True)) - ) - if cube_no_xycoords: - self.assertEqual(expected_dim_coords, []) - x_coord = None - y_coord = None - else: - self.assertEqual(len(x_coords), 1) - (x_coord,) = x_coords - self.assertEqual(len(y_coords), 1) - (y_coord,) = y_coords - - self.assertEqual( - set(expected_aux_coords), set(cube.coords(dim_coords=False)) - ) - if x_coord: - if xco_stdname is None: - # no check - pass - elif xco_stdname is True: - self.assertIsNotNone(x_coord.standard_name) - elif xco_stdname is False: - self.assertIsNone(x_coord.standard_name) - else: - self.assertEqual(x_coord.standard_name, xco_stdname) - - if y_coord: - if yco_stdname is None: - # no check - pass - if yco_stdname is True: - self.assertIsNotNone(y_coord.standard_name) - elif yco_stdname is False: - self.assertIsNone(y_coord.standard_name) - else: - self.assertEqual(y_coord.standard_name, yco_stdname) - - cube_cs = cube.coord_system() - if cube_no_xycoords: - yco_cs = None - xco_cs = None - else: - yco_cs = y_coord.coord_system - xco_cs = x_coord.coord_system - if cube_no_cs: - self.assertIsNone(cube_cs) - self.assertIsNone(yco_cs) - self.assertIsNone(xco_cs) - else: - if cube_cstype is not None: - self.assertIsInstance(cube_cs, cube_cstype) - if xco_no_cs: - self.assertIsNone(xco_cs) - else: - self.assertEqual(xco_cs, cube_cs) - if yco_no_cs: - self.assertIsNone(yco_cs) - else: - self.assertEqual(yco_cs, cube_cs) - - -class Mixin__grid_mapping(Mixin_Test__nc_load_actions): +class Mixin__grid_mapping(Mixin__nc_load_actions): # Various testcases for translation of grid-mappings def test_basic_latlon(self): @@ -875,35 +468,5 @@ def tearDownClass(cls): super().tearDownClass() -class Test__additional(Mixin_Test__nc_load_actions, tests.IrisTest): - # Run grid-mapping tests with non-Pyke (actions) - use_pyke = False - - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - - def test_nondim_lats(self): - # Check what happens when values don't allow a coord to be dim-coord. - # - # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_grid_mapping_(latitude_longitude) - # 003 : fc_provides_coordinate_(latitude) - # 004 : fc_provides_coordinate_(longitude) - # 005 : fc_build_coordinate_(latitude) - # 006 : fc_build_coordinate_(longitude) - # NOTES: - # in terms of rule triggers, this is not distinct from a normal case - # - but the latitude is now an aux-coord. - warning = "must be.* monotonic" - result = self.run_testcase(warning=warning, yco_values=[0.0, 0.0]) - self.check_result(result, yco_is_aux=True) - - if __name__ == "__main__": tests.main() From fe8ee7ef94d38ae08c57b462d75d5fe56eb58481 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Sun, 6 Jun 2021 23:18:35 +0100 Subject: [PATCH 17/53] Small fix to actions code. 
--- lib/iris/fileformats/_nc_load_rules/actions.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index 38b2b96d51..bfe7999789 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -280,9 +280,10 @@ def action_build_auxiliary_coordinate(engine, auxcoord_fact): rule_name = "fc_build_auxiliary_coordinate" # FOR NOW: attempt to identify type - # TODO: can maybe eventually remove this, as it only affects rule_name. + # TODO: eventually remove much of this, which only affects rule_name. # (but could possibly retain for future debugging purposes) coord_type = "" # unidentified : can be OK + coord_name = None if hh.is_time(engine, var_name): coord_type = "time" elif hh.is_time_period(engine, var_name): @@ -291,18 +292,22 @@ def action_build_auxiliary_coordinate(engine, auxcoord_fact): coord_type = "longitude" if hh.is_rotated_longitude(engine, var_name): coord_type += "_rotated" + coord_name = hh.CF_VALUE_STD_NAME_GRID_LON + else: + coord_name = hh.CF_VALUE_STD_NAME_LON elif hh.is_latitude(engine, var_name): coord_type = "latitude" if hh.is_rotated_latitude(engine, var_name): coord_type += "_rotated" + coord_name = hh.CF_VALUE_STD_NAME_GRID_LAT + else: + coord_name = hh.CF_VALUE_STD_NAME_LAT if coord_type: rule_name += f"_{coord_type}" cf_var = engine.cf_var.cf_group.auxiliary_coordinates[var_name] - hh.build_auxiliary_coordinate( - engine, cf_var, coord_name=hh.CF_VALUE_STD_NAME_GRID_LON - ) + hh.build_auxiliary_coordinate(engine, cf_var, coord_name=coord_name) return rule_name From 75550fcb29fb12cf4fdf8e207271499a9047870c Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Sun, 6 Jun 2021 23:19:56 +0100 Subject: [PATCH 18/53] Move grid-mapping-specific content out of common load_cube__activate code. --- .../load_cube/load_cube__activate/__init__.py | 287 +----------------- .../load_cube__activate/test__additional.py | 6 +- .../test__grid_mappings.py | 287 +++++++++++++++++- 3 files changed, 298 insertions(+), 282 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py index 6d6c1a92f4..a2f3d4ce31 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py @@ -23,7 +23,6 @@ import iris.fileformats.netcdf from iris.fileformats.netcdf import _load_cube import iris.fileformats._nc_load_rules.engine -import iris.fileformats._nc_load_rules.helpers as hh """ Notes on testing method. @@ -94,192 +93,7 @@ def tearDownClass(cls): # Destroy a temp directory for temp files. shutil.rmtree(cls.temp_dirpath) - def _make_testcase_cdl( - self, - latitude_units=None, - gridmapvar_name=None, - gridmapvar_mappropertyname=None, - mapping_missingradius=False, - mapping_type_name=None, - mapping_scalefactor=None, - yco_values=None, - xco_name=None, - yco_name=None, - xco_units=None, - yco_units=None, - ): - """ - Create a CDL string for a testcase. - - This is the "master" routine for creating all our testcases. - Kwarg options modify a simple default testcase with a latlon grid. - The routine handles the various testcase options and their possible - interactions. This includes knowing what extra changes are required - to support different grid-mapping types (for example). 
- - """ - # The grid-mapping options are standard-latlon, rotated, or non-latlon. - # This affects names+units of the X and Y coords. - # We don't have an option to *not* include a grid-mapping variable, but - # we can mimic a missing grid-mapping by changing the varname from that - # which the data-variable refers to, with "gridmapvar_name=xxx". - # Likewise, an invalid (unrecognised) grid-mapping can be mimicked by - # selecting an unkown 'grid_mapping_name' property, with - # "gridmapvar_mappropertyname=xxx". - if mapping_type_name is None: - # Default grid-mapping and coords are standard lat-lon. - mapping_type_name = hh.CF_GRID_MAPPING_LAT_LON - xco_name_default = hh.CF_VALUE_STD_NAME_LON - yco_name_default = hh.CF_VALUE_STD_NAME_LAT - xco_units_default = "degrees_east" - # Special kwarg overrides some of the values. - if latitude_units is None: - yco_units_default = "degrees_north" - else: - # Override the latitude units (to invalidate). - yco_units_default = latitude_units - - elif mapping_type_name == hh.CF_GRID_MAPPING_ROTATED_LAT_LON: - # Rotated lat-lon coordinates. - xco_name_default = hh.CF_VALUE_STD_NAME_GRID_LON - yco_name_default = hh.CF_VALUE_STD_NAME_GRID_LAT - xco_units_default = "degrees" - yco_units_default = "degrees" - - else: - # General non-latlon coordinates - # Exactly which depends on the grid_mapping name. - xco_name_default = hh.CF_VALUE_STD_NAME_PROJ_X - yco_name_default = hh.CF_VALUE_STD_NAME_PROJ_Y - xco_units_default = "m" - yco_units_default = "m" - - # Options can override coord (standard) names and units. - if xco_name is None: - xco_name = xco_name_default - if yco_name is None: - yco_name = yco_name_default - if xco_units is None: - xco_units = xco_units_default - if yco_units is None: - yco_units = yco_units_default - - grid_mapping_name = "grid" - # Options can override the gridvar name and properties. - g_varname = gridmapvar_name - g_mapname = gridmapvar_mappropertyname - if g_varname is None: - g_varname = grid_mapping_name - if g_mapname is None: - # If you change this, it is no longer a valid grid-mapping var. - g_mapname = "grid_mapping_name" - - # Omit the earth radius, if requested. - if mapping_missingradius: - g_radius_string = "" - else: - g_radius_string = f"{g_varname}:earth_radius = 6.e6 ;" - g_string = f""" - int {g_varname} ; - {g_varname}:{g_mapname} = "{mapping_type_name}"; - {g_radius_string} - """ - - # Add a specified scale-factor, if requested. - if mapping_scalefactor is not None: - # Add a specific scale-factor term to the grid mapping. - # (Non-unity scale is not supported for Mercator/Stereographic). - sfapo_name = hh.CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN - g_string += f""" - {g_varname}:{sfapo_name} = {mapping_scalefactor} ; - """ - - # - # Add various additional (minimal) required properties for different - # grid mapping types. 
- # - - # Those which require 'latitude of projection origin' - if mapping_type_name in ( - hh.CF_GRID_MAPPING_TRANSVERSE, - hh.CF_GRID_MAPPING_STEREO, - hh.CF_GRID_MAPPING_GEOSTATIONARY, - hh.CF_GRID_MAPPING_VERTICAL, - ): - latpo_name = hh.CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN - g_string += f""" - {g_varname}:{latpo_name} = 0.0 ; - """ - # Those which require 'longitude of projection origin' - if mapping_type_name in ( - hh.CF_GRID_MAPPING_STEREO, - hh.CF_GRID_MAPPING_GEOSTATIONARY, - hh.CF_GRID_MAPPING_VERTICAL, - ): - lonpo_name = hh.CF_ATTR_GRID_LON_OF_PROJ_ORIGIN - g_string += f""" - {g_varname}:{lonpo_name} = 0.0 ; - """ - # Those which require 'longitude of central meridian' - if mapping_type_name in (hh.CF_GRID_MAPPING_TRANSVERSE,): - latcm_name = hh.CF_ATTR_GRID_LON_OF_CENT_MERIDIAN - g_string += f""" - {g_varname}:{latcm_name} = 0.0 ; - """ - # Those which require 'perspective point height' - if mapping_type_name in ( - hh.CF_GRID_MAPPING_VERTICAL, - hh.CF_GRID_MAPPING_GEOSTATIONARY, - ): - pph_name = hh.CF_ATTR_GRID_PERSPECTIVE_HEIGHT - g_string += f""" - {g_varname}:{pph_name} = 600000.0 ; - """ - # Those which require 'sweep angle axis' - if mapping_type_name in (hh.CF_GRID_MAPPING_GEOSTATIONARY,): - saa_name = hh.CF_ATTR_GRID_SWEEP_ANGLE_AXIS - g_string += f""" - {g_varname}:{saa_name} = "y" ; - """ - - # y-coord values - if yco_values is None: - yco_values = [10.0, 20.0] - yco_value_strings = [str(val) for val in yco_values] - yco_values_string = ", ".join(yco_value_strings) - - # Construct the total CDL string - cdl_string = f""" - netcdf test {{ - dimensions: - yco = 2 ; - xco = 3 ; - variables: - double phenom(yco, xco) ; - phenom:standard_name = "air_temperature" ; - phenom:units = "K" ; - phenom:grid_mapping = "grid" ; - double yco(yco) ; - yco:axis = "Y" ; - yco:units = "{yco_units}" ; - yco:standard_name = "{yco_name}" ; - double xco(xco) ; - xco:axis = "X" ; - xco:units = "{xco_units}" ; - xco:standard_name = "{xco_name}" ; - {g_string} - data: - yco = {yco_values_string} ; - xco = 100., 110., 120. ; - }} - """ - if self.debug: - print("File content:") - print(cdl_string) - print("------\n") - return cdl_string - - def _load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): + def load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): """ Load the 'phenom' data variable in a CDL testcase, as a cube. @@ -326,100 +140,19 @@ def run_testcase(self, warning=None, **testcase_kwargs): else: context = self.assertWarnsRegexp(warning) with context: - cube = self._load_cube_from_cdl(cdl_string, cdl_path, nc_path) + cube = self.load_cube_from_cdl(cdl_string, cdl_path, nc_path) if self.debug: print("\nCube:") print(cube) print("") return cube - def check_result( - self, - cube, - cube_cstype=None, - cube_no_cs=False, - cube_no_xycoords=False, - xco_no_cs=False, # N.B. no effect if cube_no_cs is True - yco_no_cs=False, # N.B. no effect if cube_no_cs is True - yco_is_aux=False, - xco_stdname=True, - yco_stdname=True, - ): - """ - Check key properties of a result cube. + def _make_testcase_cdl(self, **kwargs): + """Make a testcase CDL string.""" + # Override for specific uses... + raise NotImplementedError() - Various options control the expected things which are tested. 
- """ - self.assertEqual(cube.standard_name, "air_temperature") - self.assertEqual(cube.var_name, "phenom") - - x_coords = cube.coords(dimensions=(1,)) - y_coords = cube.coords(dimensions=(0,)) - if yco_is_aux: - expected_dim_coords = x_coords - expected_aux_coords = y_coords - else: - expected_dim_coords = x_coords + y_coords - expected_aux_coords = [] - - self.assertEqual( - set(expected_dim_coords), set(cube.coords(dim_coords=True)) - ) - if cube_no_xycoords: - self.assertEqual(expected_dim_coords, []) - x_coord = None - y_coord = None - else: - self.assertEqual(len(x_coords), 1) - (x_coord,) = x_coords - self.assertEqual(len(y_coords), 1) - (y_coord,) = y_coords - - self.assertEqual( - set(expected_aux_coords), set(cube.coords(dim_coords=False)) - ) - - if x_coord: - if xco_stdname is None: - # no check - pass - elif xco_stdname is True: - self.assertIsNotNone(x_coord.standard_name) - elif xco_stdname is False: - self.assertIsNone(x_coord.standard_name) - else: - self.assertEqual(x_coord.standard_name, xco_stdname) - - if y_coord: - if yco_stdname is None: - # no check - pass - if yco_stdname is True: - self.assertIsNotNone(y_coord.standard_name) - elif yco_stdname is False: - self.assertIsNone(y_coord.standard_name) - else: - self.assertEqual(y_coord.standard_name, yco_stdname) - - cube_cs = cube.coord_system() - if cube_no_xycoords: - yco_cs = None - xco_cs = None - else: - yco_cs = y_coord.coord_system - xco_cs = x_coord.coord_system - if cube_no_cs: - self.assertIsNone(cube_cs) - self.assertIsNone(yco_cs) - self.assertIsNone(xco_cs) - else: - if cube_cstype is not None: - self.assertIsInstance(cube_cs, cube_cstype) - if xco_no_cs: - self.assertIsNone(xco_cs) - else: - self.assertEqual(xco_cs, cube_cs) - if yco_no_cs: - self.assertIsNone(yco_cs) - else: - self.assertEqual(yco_cs, cube_cs) + def check_result(self, cube, **kwargs): + """Test a result cube.""" + # Override for specific uses... 
+ raise NotImplementedError() diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__additional.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__additional.py index 0678e3b307..ce5b3ad7b7 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__additional.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__additional.py @@ -17,12 +17,12 @@ import iris.tests as tests -from iris.tests.unit.fileformats.netcdf.load_cube.load_cube__activate import ( - Mixin__nc_load_actions, +from iris.tests.unit.fileformats.netcdf.load_cube.load_cube__activate.test__grid_mappings import ( + Mixin__grid_mapping, ) -class Test__additional(Mixin__nc_load_actions, tests.IrisTest): +class Test__additional(Mixin__grid_mapping, tests.IrisTest): # Run grid-mapping tests with non-Pyke (actions) use_pyke = False diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py index ac495ac910..41b288195e 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py @@ -21,6 +21,285 @@ class Mixin__grid_mapping(Mixin__nc_load_actions): + # Testcase support routines for testing translation of grid-mappings + def _make_testcase_cdl( + self, + latitude_units=None, + gridmapvar_name=None, + gridmapvar_mappropertyname=None, + mapping_missingradius=False, + mapping_type_name=None, + mapping_scalefactor=None, + yco_values=None, + xco_name=None, + yco_name=None, + xco_units=None, + yco_units=None, + ): + """ + Create a CDL string for a testcase. + + This is the "master" routine for creating all our testcases. + Kwarg options modify a simple default testcase with a latlon grid. + The routine handles the various testcase options and their possible + interactions. This includes knowing what extra changes are required + to support different grid-mapping types (for example). + + """ + # The grid-mapping options are standard-latlon, rotated, or non-latlon. + # This affects names+units of the X and Y coords. + # We don't have an option to *not* include a grid-mapping variable, but + # we can mimic a missing grid-mapping by changing the varname from that + # which the data-variable refers to, with "gridmapvar_name=xxx". + # Likewise, an invalid (unrecognised) grid-mapping can be mimicked by + # selecting an unkown 'grid_mapping_name' property, with + # "gridmapvar_mappropertyname=xxx". + if mapping_type_name is None: + # Default grid-mapping and coords are standard lat-lon. + mapping_type_name = hh.CF_GRID_MAPPING_LAT_LON + xco_name_default = hh.CF_VALUE_STD_NAME_LON + yco_name_default = hh.CF_VALUE_STD_NAME_LAT + xco_units_default = "degrees_east" + # Special kwarg overrides some of the values. + if latitude_units is None: + yco_units_default = "degrees_north" + else: + # Override the latitude units (to invalidate). + yco_units_default = latitude_units + + elif mapping_type_name == hh.CF_GRID_MAPPING_ROTATED_LAT_LON: + # Rotated lat-lon coordinates. + xco_name_default = hh.CF_VALUE_STD_NAME_GRID_LON + yco_name_default = hh.CF_VALUE_STD_NAME_GRID_LAT + xco_units_default = "degrees" + yco_units_default = "degrees" + + else: + # General non-latlon coordinates + # Exactly which depends on the grid_mapping name. 
+ xco_name_default = hh.CF_VALUE_STD_NAME_PROJ_X + yco_name_default = hh.CF_VALUE_STD_NAME_PROJ_Y + xco_units_default = "m" + yco_units_default = "m" + + # Options can override coord (standard) names and units. + if xco_name is None: + xco_name = xco_name_default + if yco_name is None: + yco_name = yco_name_default + if xco_units is None: + xco_units = xco_units_default + if yco_units is None: + yco_units = yco_units_default + + grid_mapping_name = "grid" + # Options can override the gridvar name and properties. + g_varname = gridmapvar_name + g_mapname = gridmapvar_mappropertyname + if g_varname is None: + g_varname = grid_mapping_name + if g_mapname is None: + # If you change this, it is no longer a valid grid-mapping var. + g_mapname = "grid_mapping_name" + + # Omit the earth radius, if requested. + if mapping_missingradius: + g_radius_string = "" + else: + g_radius_string = f"{g_varname}:earth_radius = 6.e6 ;" + g_string = f""" + int {g_varname} ; + {g_varname}:{g_mapname} = "{mapping_type_name}"; + {g_radius_string} + """ + + # Add a specified scale-factor, if requested. + if mapping_scalefactor is not None: + # Add a specific scale-factor term to the grid mapping. + # (Non-unity scale is not supported for Mercator/Stereographic). + sfapo_name = hh.CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN + g_string += f""" + {g_varname}:{sfapo_name} = {mapping_scalefactor} ; + """ + + # + # Add various additional (minimal) required properties for different + # grid mapping types. + # + + # Those which require 'latitude of projection origin' + if mapping_type_name in ( + hh.CF_GRID_MAPPING_TRANSVERSE, + hh.CF_GRID_MAPPING_STEREO, + hh.CF_GRID_MAPPING_GEOSTATIONARY, + hh.CF_GRID_MAPPING_VERTICAL, + ): + latpo_name = hh.CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN + g_string += f""" + {g_varname}:{latpo_name} = 0.0 ; + """ + # Those which require 'longitude of projection origin' + if mapping_type_name in ( + hh.CF_GRID_MAPPING_STEREO, + hh.CF_GRID_MAPPING_GEOSTATIONARY, + hh.CF_GRID_MAPPING_VERTICAL, + ): + lonpo_name = hh.CF_ATTR_GRID_LON_OF_PROJ_ORIGIN + g_string += f""" + {g_varname}:{lonpo_name} = 0.0 ; + """ + # Those which require 'longitude of central meridian' + if mapping_type_name in (hh.CF_GRID_MAPPING_TRANSVERSE,): + latcm_name = hh.CF_ATTR_GRID_LON_OF_CENT_MERIDIAN + g_string += f""" + {g_varname}:{latcm_name} = 0.0 ; + """ + # Those which require 'perspective point height' + if mapping_type_name in ( + hh.CF_GRID_MAPPING_VERTICAL, + hh.CF_GRID_MAPPING_GEOSTATIONARY, + ): + pph_name = hh.CF_ATTR_GRID_PERSPECTIVE_HEIGHT + g_string += f""" + {g_varname}:{pph_name} = 600000.0 ; + """ + # Those which require 'sweep angle axis' + if mapping_type_name in (hh.CF_GRID_MAPPING_GEOSTATIONARY,): + saa_name = hh.CF_ATTR_GRID_SWEEP_ANGLE_AXIS + g_string += f""" + {g_varname}:{saa_name} = "y" ; + """ + + # y-coord values + if yco_values is None: + yco_values = [10.0, 20.0] + yco_value_strings = [str(val) for val in yco_values] + yco_values_string = ", ".join(yco_value_strings) + + # Construct the total CDL string + cdl_string = f""" + netcdf test {{ + dimensions: + yco = 2 ; + xco = 3 ; + variables: + double phenom(yco, xco) ; + phenom:standard_name = "air_temperature" ; + phenom:units = "K" ; + phenom:grid_mapping = "grid" ; + double yco(yco) ; + yco:axis = "Y" ; + yco:units = "{yco_units}" ; + yco:standard_name = "{yco_name}" ; + double xco(xco) ; + xco:axis = "X" ; + xco:units = "{xco_units}" ; + xco:standard_name = "{xco_name}" ; + {g_string} + data: + yco = {yco_values_string} ; + xco = 100., 110., 120. 
; + }} + """ + if self.debug: + print("File content:") + print(cdl_string) + print("------\n") + return cdl_string + + def check_result( + self, + cube, + cube_cstype=None, + cube_no_cs=False, + cube_no_xycoords=False, + xco_no_cs=False, # N.B. no effect if cube_no_cs is True + yco_no_cs=False, # N.B. no effect if cube_no_cs is True + yco_is_aux=False, + xco_stdname=True, + yco_stdname=True, + ): + """ + Check key properties of a result cube. + + Various options control the expected things which are tested. + """ + self.assertEqual(cube.standard_name, "air_temperature") + self.assertEqual(cube.var_name, "phenom") + + x_coords = cube.coords(dimensions=(1,)) + y_coords = cube.coords(dimensions=(0,)) + if yco_is_aux: + expected_dim_coords = x_coords + expected_aux_coords = y_coords + else: + expected_dim_coords = x_coords + y_coords + expected_aux_coords = [] + + self.assertEqual( + set(expected_dim_coords), set(cube.coords(dim_coords=True)) + ) + if cube_no_xycoords: + self.assertEqual(expected_dim_coords, []) + x_coord = None + y_coord = None + else: + self.assertEqual(len(x_coords), 1) + (x_coord,) = x_coords + self.assertEqual(len(y_coords), 1) + (y_coord,) = y_coords + + self.assertEqual( + set(expected_aux_coords), set(cube.coords(dim_coords=False)) + ) + + if x_coord: + if xco_stdname is None: + # no check + pass + elif xco_stdname is True: + self.assertIsNotNone(x_coord.standard_name) + elif xco_stdname is False: + self.assertIsNone(x_coord.standard_name) + else: + self.assertEqual(x_coord.standard_name, xco_stdname) + + if y_coord: + if yco_stdname is None: + # no check + pass + if yco_stdname is True: + self.assertIsNotNone(y_coord.standard_name) + elif yco_stdname is False: + self.assertIsNone(y_coord.standard_name) + else: + self.assertEqual(y_coord.standard_name, yco_stdname) + + cube_cs = cube.coord_system() + if cube_no_xycoords: + yco_cs = None + xco_cs = None + else: + yco_cs = y_coord.coord_system + xco_cs = x_coord.coord_system + if cube_no_cs: + self.assertIsNone(cube_cs) + self.assertIsNone(yco_cs) + self.assertIsNone(xco_cs) + else: + if cube_cstype is not None: + self.assertIsInstance(cube_cs, cube_cstype) + if xco_no_cs: + self.assertIsNone(xco_cs) + else: + self.assertEqual(xco_cs, cube_cs) + if yco_no_cs: + self.assertIsNone(yco_cs) + else: + self.assertEqual(yco_cs, cube_cs) + + +class Mixin__grid_mapping__tests(Mixin__grid_mapping): # Various testcases for translation of grid-mappings def test_basic_latlon(self): @@ -438,7 +717,9 @@ def test_mapping__mismatch__nonll_coords_missing_system(self): ) -class Test__grid_mapping__pyke_rules(Mixin__grid_mapping, tests.IrisTest): +class Test__grid_mapping__pyke_rules( + Mixin__grid_mapping__tests, tests.IrisTest +): # Run grid-mapping tests with Pyke (rules) use_pyke = True @@ -455,7 +736,9 @@ def tearDownClass(cls): @skip -class Test__grid_mapping__nonpyke_actions(Mixin__grid_mapping, tests.IrisTest): +class Test__grid_mapping__nonpyke_actions( + Mixin__grid_mapping__tests, tests.IrisTest +): # Run grid-mapping tests with non-Pyke (actions) use_pyke = False From 97f29761b2f0649e448ac442ecbfaa3de6c7f356 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Sun, 6 Jun 2021 23:23:38 +0100 Subject: [PATCH 19/53] Add tests for time rules. 
--- .../load_cube__activate/test__time_coords.py | 458 ++++++++++++++++++ 1 file changed, 458 insertions(+) create mode 100644 lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py new file mode 100644 index 0000000000..0102be1817 --- /dev/null +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py @@ -0,0 +1,458 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the engine.activate() call within the +`iris.fileformats.netcdf._load_cube` function. + +Tests for rules activation relating to 'time' and 'time_period' coords. + +""" +import iris.tests as tests + + +from iris.tests.unit.fileformats.netcdf.load_cube.load_cube__activate import ( + Mixin__nc_load_actions, +) + + +_OPTIONS_TEMPLATE = { + "which": "", # set to "something" + "stdname": "_auto_which", # default = time / time_period + "varname": "_as_which", # default = time / period + "dimname": "_as_which", + "in_phenomvar_dims": True, + "in_phenomvar_coords": False, # set for an aux-coord + "values_all_zero": False, # set to block CFDimensionVariable identity + "units": "_auto_which", # specific to time/period +} + + +class Opts: + # A dict-like thing initialised from the _OPTIONS settings. + # But using '.' access in place of indexing + def __init__(self, **kwargs): + opts = _OPTIONS_TEMPLATE.copy() + opts.update(kwargs) + self._opts = opts + + def __getattr__(self, item): + return self._opts[item] + + def __setattr__(self, key, value): + if key == "_opts": + # Avoid the infinite loop when setting up "self.opt=opts". + super().__setattr__(key, value) + else: + self._opts[key] = value + + def update(self, **kwargs): + self._opts.update(kwargs) + + +class Mixin__timecoords__common(Mixin__nc_load_actions): + def _make_testcase_cdl( + self, + phenom_dims="_auto", # =get from time+period opts + phenom_coords="_auto", # =get from time+period opts + time_opts=None, + period_opts=None, + timedim_name="time", + perioddim_name="period", + ): + opt_t = None + opt_p = None + if time_opts is not None: + # Replace 'True' with options dict for 'time' options + opt_t = Opts(which="time", **time_opts) + if period_opts is not None: + # Replace 'True' with options dict for 'period' options + opt_p = Opts(which="period", **period_opts) + + # Define the 'standard' dimensions which we will create + # NB we don't necessarily *use* either of these + dims_and_lens = {timedim_name: 2, perioddim_name: 3} + dims_string = "\n".join( + [ + f" {name} = {length} ;" + for name, length in dims_and_lens.items() + ] + ) + + phenom_auto_dims = [] + phenom_auto_coords = [] + coord_variables_string = "" + data_string = "" + for opt in (opt_t, opt_p): + # Handle computed defaults and common info for both coord options. + if opt: + if opt.which not in ("time", "period"): + raise ValueError(f"unrecognised opt.which={opt.which}") + + # Do computed defaults. 
+ if opt.stdname == "_auto_which": + if opt.which == "time": + opt.stdname = "time" + else: + assert opt.which == "period" + opt.stdname = "forecast_period" + if opt.varname == "_as_which": + opt.varname = opt.which + if opt.dimname == "_as_which": + opt.dimname = opt.which + if opt.units == "_auto_which": + if opt.which == "time": + opt.units = "hours since 2000-01-01" + else: + assert opt.which == "period" + opt.units = "hours" + + # Build 'auto' lists of phenom dims and (aux) coordinates. + if opt.in_phenomvar_dims: + phenom_auto_dims.append(opt.dimname) + if opt.in_phenomvar_coords: + phenom_auto_coords.append(opt.varname) + + # Add a definition of the coord variable. + coord_variables_string += f""" + double {opt.varname}({opt.dimname}) ; + {opt.varname}:standard_name = "{opt.stdname}" ; + {opt.varname}:units = "{opt.units}" ; +""" + # NOTE: we don't bother with an 'axis' property. + # We can probe the behaviour we need without that, because we + # are *not* testing the cf.py categorisation code, or the + # helper "build_xxx" routines. + + # Define coord-var data values (so it can be a dimension). + varname = opt.varname + if opt.values_all_zero: + # Use 'values_all_zero' to prevent a dim-var from + # identifying as a CFDimensionCoordinate (as it is + # non-monotonic). + dim_vals = [0.0] * dims_and_lens[opt.dimname] + else: + # "otherwise", assign an ascending sequence. + dim_vals = range(dims_and_lens[opt.dimname]) + dimvals_string = ", ".join(f"{val:0.1f}" for val in dim_vals) + data_string += f"\n {varname} = {dimvals_string} ;" + + if phenom_dims == "_auto": + phenom_dims = phenom_auto_dims + if not phenom_dims: + phenom_dims_string = "" + else: + phenom_dims_string = ", ".join(phenom_dims) + + if phenom_coords == "_auto": + phenom_coords = phenom_auto_coords + if not phenom_coords: + phenom_coords_string = "" + else: + phenom_coords_string = " ".join(phenom_coords) + phenom_coords_string = ( + " " + f'phenom:coordinates = "{phenom_coords_string}" ; ' + ) + + # Create a testcase with time dims + coords. + cdl_string = f""" +netcdf test {{ + dimensions: +{dims_string} + variables: + double phenom({phenom_dims_string}) ; + phenom:standard_name = "air_temperature" ; + phenom:units = "K" ; +{phenom_coords_string} + +{coord_variables_string} + data: +{data_string} +}} +""" + if self.debug: + print("Testcase CDL string") + print(cdl_string) + print("----") + print("") + return cdl_string + + def check_result( + self, + cube, + time_is="dim", + period_is="missing", + time_name=None, + period_name=None, + time_class="_auto", + period_class="_auto", + ): + """ + Check presence of expected dim/aux-coords in the result cube. + + Both of 'time_is' and 'period_is' can take values 'dim', 'aux' or + 'missing'. + + """ + options = ("dim", "aux", "missing") + msg = f'Invalid "{{name}}" = {{opt}} : Not one of {options!r}.' 
+ if time_is not in options: + raise ValueError(msg.format(name="time_is", opt=time_is)) + if period_is not in options: + raise ValueError(msg.format(name="period_is", opt=period_is)) + + # Get the facts we want to check + if time_name is None: + time_name = "time" + if period_name is None: + period_name = "forecast_period" + time_dimcos = cube.coords(time_name, dim_coords=True) + time_auxcos = cube.coords(time_name, dim_coords=False) + period_dimcos = cube.coords(period_name, dim_coords=True) + period_auxcos = cube.coords(period_name, dim_coords=False) + + if time_is == "dim": + self.assertEqual(len(time_dimcos), 1) + self.assertEqual(len(time_auxcos), 0) + elif time_is == "aux": + self.assertEqual(len(time_dimcos), 0) + self.assertEqual(len(time_auxcos), 1) + else: + self.assertEqual(len(time_dimcos), 0) + self.assertEqual(len(time_auxcos), 0) + + if period_is == "dim": + self.assertEqual(len(period_dimcos), 1) + self.assertEqual(len(period_auxcos), 0) + elif period_is == "aux": + self.assertEqual(len(period_dimcos), 0) + self.assertEqual(len(period_auxcos), 1) + else: + self.assertEqual(len(period_dimcos), 0) + self.assertEqual(len(period_auxcos), 0) + + +class Mixin__singlecoord__tests(Mixin__timecoords__common): + # Coordinate tests to be run for both 'time' and 'period' coordinate vars. + use_pyke = True + debug = False + # Set (in inheritors) to select time/period testing. + which = None + + def run_testcase(self, coord_dim_name=None, **opts): + """ + Specialise 'run_testcase' for single-coord 'time' or 'period' testing. + """ + which = self.which + assert which in ("time", "period") + + # Separate the 'Opt' keywords from "others" : others are passed + # directly to the parent routine, whereas 'Opt' ones are passed to + # 'time_opts' / 'period_opts' keys accordingly. + general_opts = {} + for key, value in list(opts.items()): + if key not in _OPTIONS_TEMPLATE.keys(): + del opts[key] + general_opts[key] = value + + if coord_dim_name is not None: + # Translate this into one of timedim_name/perioddim_name + general_opts[f"{which}dim_name"] = coord_dim_name + + period_opts = None + time_opts = None + if which == "time": + time_opts = opts + else: + period_opts = opts + + result = super().run_testcase( + time_opts=time_opts, period_opts=period_opts, **general_opts + ) + + return result + + def check_result(self, cube, coord_is="dim"): + """ + Specialise 'check_result' for single-coord 'time' or 'period' testing. + """ + # Pass generic 'coord_is' option to parent as time/period options. + which = self.which + assert which in ("time", "period") + + if which == "time": + time_is = coord_is + period_is = "missing" + else: + period_is = coord_is + time_is = "missing" + + super().check_result(cube, time_is=time_is, period_is=period_is) + + # + # Generic single-coordinate testcases. + # ( these are repeated for both 'time' and 'time_period' ) + # + + def test_dimension(self): + # Coord is a normal dimension --> dimcoord + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_coordinate_time + # 003 : fc_build_coordinate_time + result = self.run_testcase() + self.check_result(result, "dim") + + def test_dimension_in_phenom_coords(self): + # Dimension coord also present in phenom:coords. + # Strictly wrong but a common error in datafiles : must tolerate. 
+ # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_coordinate_time + # 003 : fc_build_coordinate_time + result = self.run_testcase(in_phenomvar_coords=True) + self.check_result(result, "dim") + + def test_dim_nonmonotonic(self): + # Coord has all-zero values, which prevents it being a dimcoord. + # The rule has a special way of treating it as an aux coord + # -- even though it doesn't appear in the phenom coords. + # ( Done by the build_coord routine, so not really a rules issue). + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_coordinate_time + # 003 : fc_build_coordinate_time + msg = "Failed to create.* dimension coordinate" + result = self.run_testcase(values_all_zero=True, warning=msg) + self.check_result(result, "aux") + + def test_dim_fails_typeident(self): + # The coord variable is identified as a CFDimensionCoordinate by cf.py, + # but having the wrong units causes it to fail the 'is_time' or + # 'is_period' test, so the 'provides_coord' rule fails to trigger. + # So it is built as a 'miscellaneous' dim-coord. + # N.B. this makes *no* practical difference, because a 'misc' dim + # coord is still a dim coord (albeit with bad units). + # ( N.B.#2 Not quite the same for lat/lon coords, where coord-specific + # 'build' rules always use a fixed standard-name ). + # Rules Triggered: + # 001 : fc_default + # 002 : fc_default_coordinate_(provide-phase) + # 003 : fc_build_coordinate_(miscellaneous) + result = self.run_testcase(units="1") + self.check_result(result, "dim") + + def test_aux(self): + # time/period is installed as an auxiliary coord. + # For this, rename both DIMENSIONS, so that the generated coords are + # not actually CF coordinates. + # For a valid case, we must *also* have a ref in phenom:coordinates + # Rules Triggered: + # 001 : fc_default + # 002 : fc_build_auxiliary_coordinate_time + result = self.run_testcase( + coord_dim_name="dim_renamed", + dimname="dim_renamed", + in_phenomvar_coords=True, + ) + self.check_result(result, "aux") + + def test_aux_not_in_phenom_coords(self): + # time/period is installed as an auxiliary coord, + # but we DIDN'T list it in phenom:coords -- otherwise as previous. + # Should have no result at all. + # Rules Triggered: + # 001 : fc_default + result = self.run_testcase( + coord_dim_name="dim_renamed", + dimname="dim_renamed", + in_phenomvar_coords=False, + ) # "should" be True for an aux-coord + self.check_result(result, "missing") + + def test_aux_fails_typeident(self): + # The coord variable is identified as a CFAuxiliaryCoordinate by cf.py, + # but having the wrong units causes it to fail the 'is_time' or + # 'is_period' test, so the 'provides_coord' rule fails to trigger. + # Rules Triggered: + # 001 : fc_default + # 002 : fc_build_auxiliary_coordinate + # Again, though it builds as a 'miscellaneous' rather than a recognised + # specific coord type, it makes no practical difference. + result = self.run_testcase( + coord_dim_name="dim_renamed", + dimname="dim_renamed", + in_phenomvar_coords=True, + units="1", + ) + self.check_result(result, "aux") + + def test_aux_no_coordsref(self): + # The coord variable is identified as a CFAuxiliaryCoordinate by cf.py, + # but having the wrong units causes it to fail the 'is_time' or + # 'is_period' test. + # Rules Triggered: + # 001 : fc_default + # 002 : fc_build_auxiliary_coordinate + # Again, though it builds as a 'miscellaneous' rather than a reocgnised + # specific coord type, it makes no practical difference. 
+ result = self.run_testcase( + coord_dim_name="dim_renamed", + dimname="dim_renamed", + in_phenomvar_coords=True, + units="1", + ) + self.check_result(result, "aux") + + +class Test__time(Mixin__singlecoord__tests, tests.IrisTest): + # Run 'time' coord tests + which = "time" + + @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + + +class Test__period(Mixin__singlecoord__tests, tests.IrisTest): + # Run 'time_period' coord tests + which = "time" + + @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + + +class Mixin__dualcoord__tests(Mixin__timecoords__common, tests.IrisTest): + # Coordinate test for combination of 'time' and 'time_period'. + # Not strictly necessary, as handling is independent, but a handy check + # on typical usage. + use_pyke = True + debug = False + + def test_time_and_period(self): + # Test case with both 'time' and 'period', with separate dims. + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_coordinate_time + # 003 : fc_provides_coordinate_time_period + # 004 : fc_build_coordinate_time + # 005 : fc_build_coordinate_time_period + result = self.run_testcase(time_opts={}, period_opts={}) + self.check_result(result, time_is="dim", period_is="dim") + + +if __name__ == "__main__": + tests.main() From 6a821837ae552f1805247f12ea189ccd84958130 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Mon, 7 Jun 2021 00:10:38 +0100 Subject: [PATCH 20/53] Simplify and remove unused keys; check coord classes; test dim+aux shared dim. --- .../load_cube__activate/test__time_coords.py | 93 ++++++++++++------- 1 file changed, 57 insertions(+), 36 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py index 0102be1817..aac63982b7 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py @@ -12,31 +12,18 @@ """ import iris.tests as tests +from iris.coords import AuxCoord, DimCoord + from iris.tests.unit.fileformats.netcdf.load_cube.load_cube__activate import ( Mixin__nc_load_actions, ) -_OPTIONS_TEMPLATE = { - "which": "", # set to "something" - "stdname": "_auto_which", # default = time / time_period - "varname": "_as_which", # default = time / period - "dimname": "_as_which", - "in_phenomvar_dims": True, - "in_phenomvar_coords": False, # set for an aux-coord - "values_all_zero": False, # set to block CFDimensionVariable identity - "units": "_auto_which", # specific to time/period -} - - class Opts: - # A dict-like thing initialised from the _OPTIONS settings. - # But using '.' access in place of indexing + # A dict-like thing which provides '.' access in place of indexing. def __init__(self, **kwargs): - opts = _OPTIONS_TEMPLATE.copy() - opts.update(kwargs) - self._opts = opts + self._opts = kwargs def __getattr__(self, item): return self._opts[item] @@ -52,6 +39,19 @@ def update(self, **kwargs): self._opts.update(kwargs) +# Per-coord options settings for testcase definitions. 
+_COORD_OPTIONS_TEMPLATE = { + "which": "", # set to "something" + "stdname": "_auto_which", # default = time / time_period + "varname": "_as_which", # default = time / period + "dimname": "_as_which", + "in_phenomvar_dims": True, + "in_phenomvar_coords": False, # set for an aux-coord + "values_all_zero": False, # set to block CFDimensionVariable identity + "units": "_auto_which", # specific to time/period +} + + class Mixin__timecoords__common(Mixin__nc_load_actions): def _make_testcase_cdl( self, @@ -65,11 +65,13 @@ def _make_testcase_cdl( opt_t = None opt_p = None if time_opts is not None: - # Replace 'True' with options dict for 'time' options - opt_t = Opts(which="time", **time_opts) + # Replace 'True' with an options dict for 'time' options + opt_t = Opts(**_COORD_OPTIONS_TEMPLATE) + opt_t.update(which="time", **time_opts) if period_opts is not None: - # Replace 'True' with options dict for 'period' options - opt_p = Opts(which="period", **period_opts) + # Replace 'True' with an options dict for 'period' options + opt_p = Opts(**_COORD_OPTIONS_TEMPLATE) + opt_p.update(which="period", **period_opts) # Define the 'standard' dimensions which we will create # NB we don't necessarily *use* either of these @@ -180,16 +182,7 @@ def _make_testcase_cdl( print("") return cdl_string - def check_result( - self, - cube, - time_is="dim", - period_is="missing", - time_name=None, - period_name=None, - time_class="_auto", - period_class="_auto", - ): + def check_result(self, cube, time_is="dim", period_is="missing"): """ Check presence of expected dim/aux-coords in the result cube. @@ -205,10 +198,8 @@ def check_result( raise ValueError(msg.format(name="period_is", opt=period_is)) # Get the facts we want to check - if time_name is None: - time_name = "time" - if period_name is None: - period_name = "forecast_period" + time_name = "time" + period_name = "forecast_period" time_dimcos = cube.coords(time_name, dim_coords=True) time_auxcos = cube.coords(time_name, dim_coords=False) period_dimcos = cube.coords(period_name, dim_coords=True) @@ -234,6 +225,17 @@ def check_result( self.assertEqual(len(period_dimcos), 0) self.assertEqual(len(period_auxcos), 0) + # Also check expected built Coord types. + if time_is == "dim": + self.assertIsInstance(time_dimcos[0], DimCoord) + elif time_is == "aux": + self.assertIsInstance(time_auxcos[0], AuxCoord) + + if period_is == "dim": + self.assertIsInstance(period_dimcos[0], DimCoord) + elif period_is == "aux": + self.assertIsInstance(period_auxcos[0], AuxCoord) + class Mixin__singlecoord__tests(Mixin__timecoords__common): # Coordinate tests to be run for both 'time' and 'period' coordinate vars. @@ -254,7 +256,7 @@ def run_testcase(self, coord_dim_name=None, **opts): # 'time_opts' / 'period_opts' keys accordingly. general_opts = {} for key, value in list(opts.items()): - if key not in _OPTIONS_TEMPLATE.keys(): + if key not in _COORD_OPTIONS_TEMPLATE.keys(): del opts[key] general_opts[key] = value @@ -453,6 +455,25 @@ def test_time_and_period(self): result = self.run_testcase(time_opts={}, period_opts={}) self.check_result(result, time_is="dim", period_is="dim") + def test_time_dim_period_aux(self): + # Test case with both 'time' and 'period' sharing a dim. 
+ # Rules Triggered: + # 001 : fc_default + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_coordinate_time + # 003 : fc_build_auxiliary_coordinate_time_period + # 004 : fc_build_coordinate_time + result = self.run_testcase( + time_opts={}, + period_opts=dict( + dimname="time", + in_phenomvar_dims=False, + in_phenomvar_coords=True, + ), + ) + self.check_result(result, time_is="dim", period_is="aux") + if __name__ == "__main__": tests.main() From b59f0e777efa53c130835fc71376d6e26ac5dce0 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Mon, 7 Jun 2021 10:10:40 +0100 Subject: [PATCH 21/53] Simpler 'Opts' implementation. --- .../load_cube__activate/test__time_coords.py | 21 ++++++------------- 1 file changed, 6 insertions(+), 15 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py index aac63982b7..0303aa5a51 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py @@ -20,23 +20,14 @@ ) -class Opts: +class Opts(dict): # A dict-like thing which provides '.' access in place of indexing. def __init__(self, **kwargs): - self._opts = kwargs - - def __getattr__(self, item): - return self._opts[item] - - def __setattr__(self, key, value): - if key == "_opts": - # Avoid the infinite loop when setting up "self.opt=opts". - super().__setattr__(key, value) - else: - self._opts[key] = value - - def update(self, **kwargs): - self._opts.update(kwargs) + # Init like a dict + super().__init__(**kwargs) + # Alias contents "self['key']", as properties "self.key" + # See: https://stackoverflow.com/a/14620633/2615050 + self.__dict__ = self # Per-coord options settings for testcase definitions. From 965ca8ad43a260fdb04081c5f2a82bba2655fa7c Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 8 Jun 2021 12:28:08 +0100 Subject: [PATCH 22/53] Tidy testing classes a bit. --- .../load_cube/load_cube__activate/__init__.py | 15 +++++++- .../test__grid_mappings.py | 5 +-- .../load_cube__activate/test__time_coords.py | 37 +++++++++++-------- 3 files changed, 36 insertions(+), 21 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py index a2f3d4ce31..234d34aad4 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py @@ -122,8 +122,12 @@ def load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): engine = iris.fileformats._nc_load_rules.engine.Engine() iris.fileformats.netcdf.DEBUG = self.debug - # iris.fileformats.netcdf.LOAD_PYKE = False - return _load_cube(engine, cf, cf_var, nc_path) + + # Call the main translation function-under-test. + cube = _load_cube(engine, cf, cf_var, nc_path) + + # Always returns a single cube. 
+ return cube def run_testcase(self, warning=None, **testcase_kwargs): """ @@ -134,13 +138,20 @@ def run_testcase(self, warning=None, **testcase_kwargs): """ cdl_path = str(self.temp_dirpath / "test.cdl") nc_path = cdl_path.replace(".cdl", ".nc") + cdl_string = self._make_testcase_cdl(**testcase_kwargs) + if self.debug: + print("CDL file content:") + print(cdl_string) + print("------\n") + if warning is None: context = self.assertNoWarningsRegexp() else: context = self.assertWarnsRegexp(warning) with context: cube = self.load_cube_from_cdl(cdl_string, cdl_path, nc_path) + if self.debug: print("\nCube:") print(cube) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py index 41b288195e..41fe6c4892 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py @@ -201,10 +201,6 @@ def _make_testcase_cdl( xco = 100., 110., 120. ; }} """ - if self.debug: - print("File content:") - print(cdl_string) - print("------\n") return cdl_string def check_result( @@ -722,6 +718,7 @@ class Test__grid_mapping__pyke_rules( ): # Run grid-mapping tests with Pyke (rules) use_pyke = True + debug = False @classmethod def setUpClass(cls): diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py index 0303aa5a51..a75d474d68 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py @@ -166,11 +166,6 @@ def _make_testcase_cdl( {data_string} }} """ - if self.debug: - print("Testcase CDL string") - print(cdl_string) - print("----") - print("") return cdl_string def check_result(self, cube, time_is="dim", period_is="missing"): @@ -230,8 +225,6 @@ def check_result(self, cube, time_is="dim", period_is="missing"): class Mixin__singlecoord__tests(Mixin__timecoords__common): # Coordinate tests to be run for both 'time' and 'period' coordinate vars. - use_pyke = True - debug = False # Set (in inheritors) to select time/period testing. which = None @@ -402,9 +395,11 @@ def test_aux_no_coordsref(self): self.check_result(result, "aux") -class Test__time(Mixin__singlecoord__tests, tests.IrisTest): +class Test__time__withpyke(Mixin__singlecoord__tests, tests.IrisTest): # Run 'time' coord tests which = "time" + use_pyke = True + debug = False @classmethod def setUpClass(cls): @@ -415,9 +410,11 @@ def tearDownClass(cls): super().tearDownClass() -class Test__period(Mixin__singlecoord__tests, tests.IrisTest): +class Test__period__withpyke(Mixin__singlecoord__tests, tests.IrisTest): # Run 'time_period' coord tests - which = "time" + which = "period" + use_pyke = True + debug = False @classmethod def setUpClass(cls): @@ -428,13 +425,10 @@ def tearDownClass(cls): super().tearDownClass() -class Mixin__dualcoord__tests(Mixin__timecoords__common, tests.IrisTest): - # Coordinate test for combination of 'time' and 'time_period'. +class Mixin__dualcoord__tests(Mixin__timecoords__common): + # Coordinate tests for a combination of 'time' and 'time_period'. # Not strictly necessary, as handling is independent, but a handy check # on typical usage. 
- use_pyke = True - debug = False - def test_time_and_period(self): # Test case with both 'time' and 'period', with separate dims. # Rules Triggered: @@ -466,5 +460,18 @@ def test_time_dim_period_aux(self): self.check_result(result, time_is="dim", period_is="aux") +class Test__dualcoord_tests__withpyke(Mixin__dualcoord__tests, tests.IrisTest): + use_pyke = True + debug = False + + @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + + if __name__ == "__main__": tests.main() From e3fd23d0fca837e60c85236362499d5097334832 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 8 Jun 2021 12:29:40 +0100 Subject: [PATCH 23/53] Tests for hybrid vertical coords. --- .../load_cube/load_cube__activate/__init__.py | 11 + .../test__hybrid_formulae.py | 225 ++++++++++++++++++ 2 files changed, 236 insertions(+) create mode 100644 lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__hybrid_formulae.py diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py index 234d34aad4..8bebb42439 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py @@ -126,6 +126,17 @@ def load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): # Call the main translation function-under-test. cube = _load_cube(engine, cf, cf_var, nc_path) + # Record on the cube, which hybrid coord elements were identified + # by the rules operation. + # Unlike the other translations, _load_cube does *not* convert this + # information into actual cube elements. That is instead done by + # `iris.fileformats.netcdf._load_aux_factory`. + # For rules testing, it is anyway more convenient to deal with the raw + # data, as each factory type has different validity requirements to + # build it, and none of that is relevant to the rules operation. + cube._formula_type_name = engine.requires.get("formula_type") + cube._formula_terms_byname = engine.requires.get("formula_terms") + # Always returns a single cube. return cube diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__hybrid_formulae.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__hybrid_formulae.py new file mode 100644 index 0000000000..c1a325925f --- /dev/null +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__hybrid_formulae.py @@ -0,0 +1,225 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the engine.activate() call within the +`iris.fileformats.netcdf._load_cube` function. + +Test rules activation relating to hybrid vertical coordinates. 
+ +""" +import iris.tests as tests + +import iris.fileformats._nc_load_rules.helpers as hh +from iris.tests.unit.fileformats.netcdf.load_cube.load_cube__activate import ( + Mixin__nc_load_actions, +) + + +class Mixin__formulae_tests(Mixin__nc_load_actions): + def _make_testcase_cdl(self, formula_root_name=None, term_names=None): + """Construct a testcase CDL for data with hybrid vertical coords.""" + if formula_root_name is None: + formula_root_name = "atmosphere_hybrid_height_coordinate" + if term_names is None: + term_names = hh.CF_COORD_VERTICAL.get(formula_root_name) + if term_names is None: + # unsupported type : just make something up + term_names = ["term1"] + + terms_string = "" + phenom_coord_names = ["vert"] # always include the root variable + formula_term_strings = [] + for term_name in term_names: + term_varname = "v_" + term_name + phenom_coord_names.append(term_varname) + formula_term_strings.append(f"{term_name}: {term_varname}") + terms_string += f""" + double {term_varname}(h) ; + {term_varname}:long_name = "{term_name}_long_name" ; + {term_varname}:units = "m" ; +""" + + # remove the extra initial space from the formula terms string + phenom_coords_string = " ".join(phenom_coord_names) + formula_terms_string = " ".join(formula_term_strings) + # Create the main result string. + cdl_str = f""" +netcdf test {{ +dimensions: + h = 2 ; +variables: + double phenom(h) ; + phenom:standard_name = "air_temperature" ; + phenom:units = "K" ; + phenom:coordinates = "{phenom_coords_string}" ; + double vert(h) ; + vert:standard_name = "{formula_root_name}" ; + vert:long_name = "hybrid_vertical" ; + vert:units = "m" ; + vert:formula_terms = "{formula_terms_string}" ; +{terms_string} +}} +""" + return cdl_str + + def check_result(self, cube, factory_type="_auto", formula_terms="_auto"): + """Check the result of a cube load with a hybrid vertical coord.""" + if factory_type == "_auto": + # replace with our 'default', which is hybrid-height. + # N.B. 'None' is different: it means expect *no* factory. + factory_type = "atmosphere_hybrid_height_coordinate" + self.assertEqual(cube._formula_type_name, factory_type) + + if formula_terms == "_auto": + # Set default terms-expected, according to the expected factory + # type. + if factory_type is None: + # If no factory, expect no identified terms. + formula_terms = [] + else: + # Expect the correct ones defined for the factory type. + formula_terms = hh.CF_COORD_VERTICAL[factory_type] + + # Compare the formula_terms list with the 'expected' ones. + # N.B. first make the 'expected' list lower case, as the lists in + # hh.CF_COORD_VERTICAL include uppercase, but rules outputs don't. + formula_terms = [term.lower() for term in formula_terms] + + # N.B. 
the terms dictionary can be missing, if there were none + actual_terms = cube._formula_terms_byname or {} + self.assertEqual(sorted(formula_terms), sorted(actual_terms.keys())) + + # Check that there is an aux-coord of the expected name for each term + for var_name in actual_terms.values(): + coords = cube.coords(var_name=var_name, dim_coords=False) + self.assertEqual(len(coords), 1) + + # + # Actual testcase routines + # + + def test_basic_hybridheight(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_build_auxiliary_coordinate + # 003 : fc_build_auxiliary_coordinate + # 004 : fc_build_auxiliary_coordinate + # 005 : fc_build_auxiliary_coordinate + # 006 : fc_build_auxiliary_coordinate + # 007 : fc_formula_type_atmosphere_hybrid_sigma_pressure_coordinate + # 008 : fc_formula_terms + # 009 : fc_formula_terms + # 010 : fc_formula_terms + # 011 : fc_formula_terms + result = self.run_testcase() + self.check_result(result) + + def test_missing_term(self): + # Check behaviour when a term is missing. + # For the test, omit "orography", which is common in practice. + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_build_auxiliary_coordinate + # 003 : fc_build_auxiliary_coordinate + # 004 : fc_build_auxiliary_coordinate + # 005 : fc_formula_type_atmosphere_hybrid_height_coordinate + # 006 : fc_formula_terms + # 007 : fc_formula_terms + result = self.run_testcase( + term_names=["a", "b"] # missing the 'orog' term + ) + self.check_result(result, formula_terms=["a", "b"]) + + def test_no_terms(self): + # Check behaviour when *all* terms are missing. + # N.B. for any _actual_ type, this is probably invalid and would fail? + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_build_auxiliary_coordinate + result = self.run_testcase( + formula_root_name="atmosphere_hybrid_height_coordinate", + term_names=[], + ) + # This does *not* trigger + # 'fc_formula_type_atmosphere_hybrid_height_coordinate' + # This is because, within the 'assert_case_specific_facts' routine, + # formula_roots are only recognised by scanning the identified + # formula_terms. + self.check_result(result, factory_type=None) + + def test_unrecognised_verticaltype(self): + # Set the root variable name to something NOT a recognised hybrid type. + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_build_auxiliary_coordinate + # 003 : fc_build_auxiliary_coordinate + # 004 : fc_build_auxiliary_coordinate + # 005 : fc_formula_terms + # 006 : fc_formula_terms + result = self.run_testcase( + formula_root_name="unknown", term_names=["a", "b"] + ) + # Check that it picks up the terms, but *not* the factory root coord, + # which is simply discarded. + self.check_result(result, factory_type=None, formula_terms=["a", "b"]) + + +# Add in tests methods to exercise each (supported) vertical coordinate type +# individually. +# NOTE: hh.CF_COORD_VERTICAL lists all the valid types, but we don't yet +# support all of them. 
+_SUPPORTED_FORMULA_TYPES = ( + # NOTE: omit "atmosphere_hybrid_height_coordinate" : our basic testcase + "atmosphere_hybrid_sigma_pressure_coordinate", + "ocean_sigma_z_coordinate", + "ocean_sigma_coordinate", + "ocean_s_coordinate", + "ocean_s_coordinate_g1", + "ocean_s_coordinate_g2", +) +for hybrid_type in _SUPPORTED_FORMULA_TYPES: + + def construct_inner_func(hybrid_type): + term_names = hh.CF_COORD_VERTICAL[hybrid_type] + + def inner(self): + result = self.run_testcase( + formula_root_name=hybrid_type, term_names=term_names + ) + self.check_result( + result, factory_type=hybrid_type, formula_terms=term_names + ) + + return inner + + # Note: use an intermediate function to generate each test method, simply to + # generate a new local variable for 'hybrid_type' on each iteration. + # Otherwise all the test methods will refer to the *same* 'hybrid_type' + # variable, i.e. the loop variable, which does not work ! + method_name = f"test_{hybrid_type}_coord" + setattr( + Mixin__formulae_tests, method_name, construct_inner_func(hybrid_type) + ) + + +class Test__formulae__withpyke(Mixin__formulae_tests, tests.IrisTest): + use_pyke = True + debug = False + + @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + + +if __name__ == "__main__": + tests.main() From 2a6cfd8d5f6bd736e8ab884b8357d24450707f41 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 8 Jun 2021 12:47:19 +0100 Subject: [PATCH 24/53] Small review changes. --- lib/iris/fileformats/_nc_load_rules/actions.py | 10 ++-------- lib/iris/fileformats/_nc_load_rules/helpers.py | 4 ++-- 2 files changed, 4 insertions(+), 10 deletions(-) diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index bfe7999789..c4f3990a85 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -48,8 +48,8 @@ from functools import wraps -def convert_actionname_to_rulename(func_name): - # Given the name of an action-func, return the name of the rule. +def _default_rulenamesfunc(func_name): + # A simple default function to deduce the rules-name from an action-name. funcname_prefix = "action_" rulename_prefix = "fc_" # To match existing behaviours rule_name = func_name @@ -60,12 +60,6 @@ def convert_actionname_to_rulename(func_name): return rule_name -def _default_rulenamesfunc(func_name): - # A simple default function to deduce the rules-name from an action-name. - rule_name = convert_actionname_to_rulename(func_name) - return rule_name - - def action_function(func): # Wrap an action function with some standard behaviour. # Notably : engages with the rules logging process. diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py index 0ac1cb7472..ce7a194b35 100644 --- a/lib/iris/fileformats/_nc_load_rules/helpers.py +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -4,8 +4,8 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -All the pure-Python 'helper' functions which previously included in the Pyke -rules database. +All the pure-Python 'helper' functions which were previously included in the +Pyke rules database 'fc_rules_cf.krb'. Initially these haven't changed. The new rules approach is still calling most of them. 
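Aside on the test-generation loop at the end of test__hybrid_formulae.py (PATCH 23 above): the intermediate construct_inner_func is needed because Python closures bind loop variables late, as the in-line note there says. A minimal standalone sketch of the pitfall and the fix follows — illustrative only, with invented names, not taken from any of these patches:

    def naive_tests():
        tests = []
        for kind in ("a", "b", "c"):
            def test_method(self):
                # Late binding: 'kind' is looked up when the method is called,
                # so every generated method sees the final loop value ("c").
                return kind
            tests.append(test_method)
        return tests

    def bound_tests():
        tests = []
        for kind in ("a", "b", "c"):
            def make_test(kind):
                def test_method(self):
                    # 'kind' is now the factory argument, fixed per iteration.
                    return kind
                return test_method
            tests.append(make_test(kind))
        return tests

    assert [t(None) for t in naive_tests()] == ["c", "c", "c"]
    assert [t(None) for t in bound_tests()] == ["a", "b", "c"]

A default argument (def inner(self, hybrid_type=hybrid_type): ...) would achieve the same binding; the factory form used in the patch keeps the generated test methods' signatures free of extra parameters.
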
From e776618ccd4cc0a55177c0b55ec4a9ee9110750d Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 8 Jun 2021 15:40:04 +0100 Subject: [PATCH 25/53] Regularise per-test records of rules triggered. --- .../load_cube__activate/test__additional.py | 13 +++++++------ .../load_cube__activate/test__time_coords.py | 5 ++--- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__additional.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__additional.py index ce5b3ad7b7..957c736501 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__additional.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__additional.py @@ -24,7 +24,8 @@ class Test__additional(Mixin__grid_mapping, tests.IrisTest): # Run grid-mapping tests with non-Pyke (actions) - use_pyke = False + use_pyke = True + debug = False @classmethod def setUpClass(cls): @@ -39,11 +40,11 @@ def test_nondim_lats(self): # # Rules Triggered: # 001 : fc_default - # 002 : fc_provides_grid_mapping_(latitude_longitude) - # 003 : fc_provides_coordinate_(latitude) - # 004 : fc_provides_coordinate_(longitude) - # 005 : fc_build_coordinate_(latitude) - # 006 : fc_build_coordinate_(longitude) + # 002 : fc_provides_grid_mapping_latitude_longitude + # 003 : fc_provides_coordinate_latitude + # 004 : fc_provides_coordinate_longitude + # 005 : fc_build_coordinate_latitude + # 006 : fc_build_coordinate_longitude # NOTES: # in terms of rule triggers, this is not distinct from a normal case # - but the latitude is now an aux-coord. diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py index a75d474d68..fa010f446d 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py @@ -326,9 +326,8 @@ def test_dim_fails_typeident(self): # ( N.B.#2 Not quite the same for lat/lon coords, where coord-specific # 'build' rules always use a fixed standard-name ). # Rules Triggered: - # 001 : fc_default - # 002 : fc_default_coordinate_(provide-phase) - # 003 : fc_build_coordinate_(miscellaneous) + # #001 : fc_default + # #002 : fc_default_coordinate result = self.run_testcase(units="1") self.check_result(result, "dim") From 7f92f7b73361eb758997cc8eb4bf71055b6a2d3e Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 8 Jun 2021 16:43:31 +0100 Subject: [PATCH 26/53] Added tests for auxiliary lat+lon coords. --- .../test__grid_mappings.py | 132 ++++++++++++++++-- 1 file changed, 123 insertions(+), 9 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py index 41fe6c4892..4995cf5a34 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py @@ -35,6 +35,8 @@ def _make_testcase_cdl( yco_name=None, xco_units=None, yco_units=None, + xco_is_dim=True, + yco_is_dim=True, ): """ Create a CDL string for a testcase. 
@@ -92,6 +94,30 @@ def _make_testcase_cdl( if yco_units is None: yco_units = yco_units_default + phenom_auxcoord_names = [] + if xco_is_dim: + # xdim has same name as xco, making xco a dim-coord + xdim_name = "xco" + else: + # use alternate dim-name, and put xco on the 'coords' list + xdim_name = "xdim_altname" + phenom_auxcoord_names.append("xco") + if yco_is_dim: + # ydim has same name as yco, making yco a dim-coord + ydim_name = "yco" # This makes the X coord a dim-coord + else: + # use alternate dim-name, and put yco on the 'coords' list + ydim_name = "ydim_altname" # This makes the X coord a dim-coord + phenom_auxcoord_names.append("yco") + # Build a 'phenom:coords' string if needed. + if phenom_auxcoord_names: + phenom_coords_string = " ".join(phenom_auxcoord_names) + phenom_coords_string = f""" + phenom:coordinates = "{phenom_coords_string}" ; +""" + else: + phenom_coords_string = "" + grid_mapping_name = "grid" # Options can override the gridvar name and properties. g_varname = gridmapvar_name @@ -180,18 +206,19 @@ def _make_testcase_cdl( cdl_string = f""" netcdf test {{ dimensions: - yco = 2 ; - xco = 3 ; + {ydim_name} = 2 ; + {xdim_name} = 3 ; variables: - double phenom(yco, xco) ; + double phenom({ydim_name}, {xdim_name}) ; phenom:standard_name = "air_temperature" ; phenom:units = "K" ; phenom:grid_mapping = "grid" ; - double yco(yco) ; +{phenom_coords_string} + double yco({ydim_name}) ; yco:axis = "Y" ; yco:units = "{yco_units}" ; yco:standard_name = "{yco_name}" ; - double xco(xco) ; + double xco({xdim_name}) ; xco:axis = "X" ; xco:units = "{xco_units}" ; xco:standard_name = "{xco_name}" ; @@ -211,6 +238,7 @@ def check_result( cube_no_xycoords=False, xco_no_cs=False, # N.B. no effect if cube_no_cs is True yco_no_cs=False, # N.B. no effect if cube_no_cs is True + xco_is_aux=False, yco_is_aux=False, xco_stdname=True, yco_stdname=True, @@ -225,12 +253,16 @@ def check_result( x_coords = cube.coords(dimensions=(1,)) y_coords = cube.coords(dimensions=(0,)) + expected_dim_coords = [] + expected_aux_coords = [] if yco_is_aux: - expected_dim_coords = x_coords - expected_aux_coords = y_coords + expected_aux_coords += y_coords else: - expected_dim_coords = x_coords + y_coords - expected_aux_coords = [] + expected_dim_coords += y_coords + if xco_is_aux: + expected_aux_coords += x_coords + else: + expected_dim_coords += x_coords self.assertEqual( set(expected_dim_coords), set(cube.coords(dim_coords=True)) @@ -748,5 +780,87 @@ def tearDownClass(cls): super().tearDownClass() +class Mixin__aux_latlons(Mixin__grid_mapping): + # Testcases for translating auxiliary latitude+longitude variables + + def test_aux_lon(self): + # Change the name of xdim, and put xco on the coords list. + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_latitude_longitude + # 003 : fc_provides_coordinate_latitude + # 004 : fc_build_auxiliary_coordinate_longitude + # 005 : fc_build_coordinate_latitude + result = self.run_testcase(xco_is_dim=False) + self.check_result(result, xco_is_aux=True, xco_no_cs=True) + + def test_aux_lat(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_latitude_longitude + # 003 : fc_provides_coordinate_longitude + # 004 : fc_build_auxiliary_coordinate_latitude + # 005 : fc_build_coordinate_longitude + result = self.run_testcase(yco_is_dim=False) + self.check_result(result, yco_is_aux=True, yco_no_cs=True) + + def test_aux_lat_and_lon(self): + # When *both* are aux, the grid-mapping is discarded. 
+ # - as in this case there are then no dim-coords to reference it. + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_latitude_longitude + # 003 : fc_build_auxiliary_coordinate_latitude + # 004 : fc_build_auxiliary_coordinate_longitude + result = self.run_testcase(xco_is_dim=False, yco_is_dim=False) + self.check_result( + result, xco_is_aux=True, yco_is_aux=True, cube_no_cs=True + ) + + def test_aux_lon_rotated(self): + # Same but with rotated-style lat + lon coords. + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_rotated_latitude_longitude + # 003 : fc_provides_coordinate_latitude + # 004 : fc_build_auxiliary_coordinate_longitude_rotated + # 005 : fc_build_coordinate_latitude_rotated + result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON, + xco_is_dim=False, + ) + self.check_result(result, xco_is_aux=True, xco_no_cs=True) + + def test_aux_lat_rotated(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_rotated_latitude_longitude + # 003 : fc_provides_coordinate_latitude + # 004 : fc_build_auxiliary_coordinate_longitude_rotated + # 005 : fc_build_coordinate_latitude_rotated + result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON, + xco_is_dim=False, + ) + self.check_result(result, xco_is_aux=True, xco_no_cs=True) + + +class Test__aux_latlons__pyke_rules(Mixin__aux_latlons, tests.IrisTest): + # Run aux-latlons tests with Pyke (rules) + use_pyke = True + debug = False + + @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + + if __name__ == "__main__": tests.main() From 3c00044645bf8590d8cae88fcd64cb2ec96886ec Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 8 Jun 2021 21:27:45 +0100 Subject: [PATCH 27/53] Tests for remaining miscellaneous rules. --- .../test__miscellaneous.py | 216 ++++++++++++++++++ 1 file changed, 216 insertions(+) create mode 100644 lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__miscellaneous.py diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__miscellaneous.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__miscellaneous.py new file mode 100644 index 0000000000..d41b19e108 --- /dev/null +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__miscellaneous.py @@ -0,0 +1,216 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the engine.activate() call within the +`iris.fileformats.netcdf._load_cube` function. + +Tests for rules activation relating to some isolated aspects : + * UKMO um-specific metadata + * label coordinates + * cell measures + * ancillary variables + +""" +import iris.tests as tests + +from iris.coords import AuxCoord, CellMeasure, AncillaryVariable +from iris.fileformats.pp import STASH + +from iris.tests.unit.fileformats.netcdf.load_cube.load_cube__activate import ( + Mixin__nc_load_actions, +) + + +class Mixin__ukmo_attributes(Mixin__nc_load_actions): + # Tests for handling of the special UM-specific data-var attributes. 
+ def _make_testcase_cdl(self, **add_attrs): + phenom_attrs_string = "" + for key, value in add_attrs.items(): + phenom_attrs_string += f""" + phenom:{key} = "{value}" ; +""" + + cdl_string = f""" +netcdf test {{ + dimensions: + xdim = 2 ; + variables: + double phenom(xdim) ; + phenom:standard_name = "air_temperature" ; + phenom:units = "K" ; +{phenom_attrs_string} +}} +""" + return cdl_string + + def check_result(self, cube, stashcode=None, processflags=None): + cube_stashattr = cube.attributes.get("STASH") + cube_processflags = cube.attributes.get("ukmo__process_flags") + + if stashcode is not None: + self.assertIsInstance(cube_stashattr, STASH) + self.assertEqual(str(stashcode), str(cube_stashattr)) + else: + self.assertIsNone(cube_stashattr) + + if processflags is not None: + self.assertIsInstance(cube_processflags, tuple) + self.assertEqual(set(cube_processflags), set(processflags)) + else: + self.assertIsNone(cube_processflags) + + # + # Testcase routines + # + stashcode = "m01s02i034" # Just one valid STASH msi string for testing + + def test_stash(self): + cube = self.run_testcase(um_stash_source=self.stashcode) + self.check_result(cube, stashcode=self.stashcode) + + def test_stash_altname(self): + cube = self.run_testcase(ukmo__um_stash_source=self.stashcode) + self.check_result(cube, stashcode=self.stashcode) + + def test_stash_empty(self): + msg = "Expected STASH code MSI string" + with self.assertRaisesRegex(ValueError, msg): + self.run_testcase(ukmo__um_stash_source="") + + def test_stash_invalid(self): + msg = "Expected STASH code MSI string" + with self.assertRaisesRegex(ValueError, msg): + self.run_testcase(ukmo__um_stash_source="XXX") + + def test_processflags_single(self): + cube = self.run_testcase(ukmo__process_flags="this") + self.check_result(cube, processflags=["this"]) + + def test_processflags_multi_with_underscores(self): + flags_testinput = "this that_1 the_other_one x" + flags_expectresult = ["this", "that 1", "the other one", "x"] + cube = self.run_testcase(ukmo__process_flags=flags_testinput) + self.check_result(cube, processflags=flags_expectresult) + + def test_processflags_empty(self): + cube = self.run_testcase(ukmo__process_flags="") + expected_result = [""] # May seem odd, but that's what it does. + self.check_result(cube, processflags=expected_result) + + +class Test__ukmo_attributes__withpyke(Mixin__ukmo_attributes, tests.IrisTest): + use_pyke = True + + +class Mixin__labels_cellmeasures_ancils(Mixin__nc_load_actions): + # Tests for some simple rules that translate facts directly into cube data, + # with no alternative actions, complications or failure modes to test. + def _make_testcase_cdl( + self, + include_label=False, + include_cellmeasure=False, + include_ancil=False, + ): + + phenom_extra_attrs_string = "" + extra_vars_string = "" + + if include_label: + phenom_extra_attrs_string += """ + phenom:coordinates = "v_label" ; +""" + extra_vars_string += """ + char v_label(xdim, strdim) ; + v_label:long_name = "string data" ; +""" + + if include_cellmeasure: + # One simple case : a valid link + a variable definition. + phenom_extra_attrs_string += """ + phenom:cell_measures = "area: v_cellm" ; +""" + extra_vars_string += """ + double v_cellm(xdim) ; + v_cellm:long_name = "cell areas" ; +""" + + if include_ancil: + # One simple case : a valid link + a variable definition. 
+ phenom_extra_attrs_string += """ + phenom:ancillary_variables = "v_ancil" ; +""" + extra_vars_string += """ + double v_ancil(xdim) ; + v_ancil:long_name = "ancillary values" ; +""" + cdl_string = f""" + netcdf test {{ + dimensions: + xdim = 2 ; + strdim = 5 ; + variables: + double phenom(xdim) ; + phenom:standard_name = "air_temperature" ; + phenom:units = "K" ; +{phenom_extra_attrs_string} +{extra_vars_string} + }} + """ + return cdl_string + + def check_result( + self, + cube, + expect_label=False, + expect_cellmeasure=False, + expect_ancil=False, + ): + label_coords = cube.coords(var_name="v_label") + if expect_label: + self.assertEqual(len(label_coords), 1) + (coord,) = label_coords + self.assertIsInstance(coord, AuxCoord) + self.assertEqual(coord.dtype.kind, "U") + else: + self.assertEqual(len(label_coords), 0) + + cell_measures = cube.cell_measures() + if expect_cellmeasure: + self.assertEqual(len(cell_measures), 1) + (cellm,) = cell_measures + self.assertIsInstance(cellm, CellMeasure) + else: + self.assertEqual(len(cell_measures), 0) + + ancils = cube.ancillary_variables() + if expect_ancil: + self.assertEqual(len(ancils), 1) + (ancil,) = ancils + self.assertIsInstance(ancil, AncillaryVariable) + else: + self.assertEqual(len(ancils), 0) + + def test_label(self): + cube = self.run_testcase(include_label=True) + self.check_result(cube, expect_label=True) + + def test_ancil(self): + cube = self.run_testcase(include_ancil=True) + self.check_result(cube, expect_ancil=True) + + def test_cellmeasure(self): + cube = self.run_testcase(include_cellmeasure=True) + self.check_result(cube, expect_cellmeasure=True) + + +class Test__labels_cellmeasures_ancils__withpyke( + Mixin__labels_cellmeasures_ancils, tests.IrisTest +): + use_pyke = True + + +if __name__ == "__main__": + tests.main() From 1cc4c7d7741f21f09f5d91a739579c425a454839 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 9 Jun 2021 10:02:02 +0100 Subject: [PATCH 28/53] Review: fix typos. --- .../load_cube/load_cube__activate/test__grid_mappings.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py index 4995cf5a34..5a11a2cc88 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py @@ -100,15 +100,18 @@ def _make_testcase_cdl( xdim_name = "xco" else: # use alternate dim-name, and put xco on the 'coords' list + # This makes the X coord an aux-coord xdim_name = "xdim_altname" phenom_auxcoord_names.append("xco") if yco_is_dim: # ydim has same name as yco, making yco a dim-coord - ydim_name = "yco" # This makes the X coord a dim-coord + ydim_name = "yco" # This makes the Y coord a dim-coord else: # use alternate dim-name, and put yco on the 'coords' list - ydim_name = "ydim_altname" # This makes the X coord a dim-coord + # This makes the Y coord an aux-coord + ydim_name = "ydim_altname" phenom_auxcoord_names.append("yco") + # Build a 'phenom:coords' string if needed. if phenom_auxcoord_names: phenom_coords_string = " ".join(phenom_auxcoord_names) From e50cdd58d64d0b0b24fc1966982332b03887f025 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 9 Jun 2021 17:39:43 +0100 Subject: [PATCH 29/53] Add testing option to compare pyke and nonpyke loads. 
--- .../load_cube/load_cube__activate/__init__.py | 100 ++++++++++++++---- 1 file changed, 82 insertions(+), 18 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py index 8bebb42439..6946c2819e 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py @@ -19,6 +19,8 @@ import subprocess import tempfile +import numpy as np + from iris.fileformats.cf import CFReader import iris.fileformats.netcdf from iris.fileformats.netcdf import _load_cube @@ -82,6 +84,9 @@ class Mixin__nc_load_actions: # TODO: ?possibly? remove when development is complete debug = False + # whether to perform action in both ways and compare results. + compare_pyke_nonpyke = True + @classmethod def setUpClass(cls): # # Control which testing method we are applying. @@ -116,29 +121,88 @@ def load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): cf_var = list(cf.cf_group.data_variables.values())[0] cf_var = cf.cf_group.data_variables["phenom"] - if self.use_pyke: - engine = iris.fileformats.netcdf._pyke_kb_engine_real() - else: - engine = iris.fileformats._nc_load_rules.engine.Engine() + do_pyke = self.use_pyke or self.compare_pyke_nonpyke + do_nonpyke = not self.use_pyke or self.compare_pyke_nonpyke + if do_pyke: + pyke_engine = iris.fileformats.netcdf._pyke_kb_engine_real() + if do_nonpyke: + nonpyke_engine = iris.fileformats._nc_load_rules.engine.Engine() iris.fileformats.netcdf.DEBUG = self.debug - # Call the main translation function-under-test. - cube = _load_cube(engine, cf, cf_var, nc_path) - - # Record on the cube, which hybrid coord elements were identified - # by the rules operation. - # Unlike the other translations, _load_cube does *not* convert this - # information into actual cube elements. That is instead done by - # `iris.fileformats.netcdf._load_aux_factory`. - # For rules testing, it is anyway more convenient to deal with the raw - # data, as each factory type has different validity requirements to - # build it, and none of that is relevant to the rules operation. - cube._formula_type_name = engine.requires.get("formula_type") - cube._formula_terms_byname = engine.requires.get("formula_terms") + # Call the main translation function to load a single cube. + def load_single_cube(engine): + # _load_cube establishes per-cube facts, activates rules and + # produces an actual cube. + cube = _load_cube(engine, cf, cf_var, nc_path) + + # Also Record, on the cubes, which hybrid coord elements were identified + # by the rules operation. + # Unlike the other translations, _load_cube does *not* convert this + # information into actual cube elements. That is instead done by + # `iris.fileformats.netcdf._load_aux_factory`. + # For rules testing, it is anyway more convenient to deal with the raw + # data, as each factory type has different validity requirements to + # build it, and none of that is relevant to the rules operation. + cube._formula_type_name = engine.requires.get("formula_type") + cube._formula_terms_byname = engine.requires.get("formula_terms") + + return cube + + if do_pyke: + pyke_cube = load_single_cube(pyke_engine) + if do_nonpyke: + nonpyke_cube = load_single_cube(nonpyke_engine) + + # If requested, directly compare the pyke and non-pyke outputs. + if self.compare_pyke_nonpyke: + # Compare the loaded cubes from both engines. 
+ print("\nPYKE-NONPYKE COMPARE") + + # First zap cube-data, as masked data does not compare well. + def unmask_cube(cube): + # preserve the original, we're going to realise.. + cube = cube.copy() + if isinstance(cube.data, np.ma.MaskedArray): + cube.data = cube.data.filled(0) + return cube + + pyke_cube_copy = unmask_cube(pyke_cube) + nonpyke_cube_copy = unmask_cube(nonpyke_cube) + if self.debug: + if nonpyke_cube_copy != pyke_cube_copy: + + def show_cube(cube): + result = str(cube) + result += "\n--coords--" + for coord in cube.coords(): + result += "\n " + str(coord) + result += "\n--attributes--" + if not cube.attributes: + result += "\n (none)" + else: + for key, value in cube.attributes.items(): + result += f"\n {key}: {value}" + return result + + print("\nPyke/nonpyke mismatch.") + print("Pyke cube:\n----") + print(show_cube(pyke_cube)) + print() + print("NONPyke cube:\n----") + print(show_cube(nonpyke_cube)) + print("") + else: + self.assertEqual(pyke_cube_copy, nonpyke_cube_copy) + + # Return the right thing, whether we did 'both' or not + if self.use_pyke: + result_cube = pyke_cube + else: + result_cube = nonpyke_cube # Always returns a single cube. - return cube + return result_cube def run_testcase(self, warning=None, **testcase_kwargs): """ From 2bfeff6e1c3311711c3951d42d9b60a8c419bfd7 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 9 Jun 2021 17:44:59 +0100 Subject: [PATCH 30/53] Fixes to grid-mapping/dimcoord actions: passing all compare-tests. --- .../fileformats/_nc_load_rules/actions.py | 113 +++++++++++++++--- 1 file changed, 94 insertions(+), 19 deletions(-) diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index c4f3990a85..07bd407940 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -182,17 +182,18 @@ def action_provides_grid_mapping(engine, gridmapping_fact): def action_provides_coordinate(engine, dimcoord_fact): (var_name,) = dimcoord_fact - # Identify the coord type - # N.B. *only* to "name" the rule, for debug : no functional need. + # Identify the "type" of a coordinate variable coord_type = None - if hh.is_latitude(engine, var_name): - coord_type = "latitude" - elif hh.is_longitude(engine, var_name): - coord_type = "longitude" - elif hh.is_rotated_latitude(engine, var_name): + # NOTE: must test for rotated cases *first*, as 'is_longitude' and + # 'is_latitude' functions also accept rotated cases. + if hh.is_rotated_latitude(engine, var_name): coord_type = "rotated_latitude" elif hh.is_rotated_longitude(engine, var_name): coord_type = "rotated_longitude" + elif hh.is_latitude(engine, var_name): + coord_type = "latitude" + elif hh.is_longitude(engine, var_name): + coord_type = "longitude" elif hh.is_time(engine, var_name): coord_type = "time" elif hh.is_time_period(engine, var_name): @@ -207,7 +208,7 @@ def action_provides_coordinate(engine, dimcoord_fact): # N.B. in the original rules, this does *not* trigger separate # 'provides' and 'build' phases : there is just a single # 'fc_default_coordinate' rule. - # Rationalise this for now by making it like the others. + # Rationalise this for now by making it more like the others. # FOR NOW: ~matching old code, but they could *all* be simplified. # TODO: combine 2 operation into 1 for ALL of these. 
coord_type = "miscellaneous" @@ -228,14 +229,14 @@ def action_provides_coordinate(engine, dimcoord_fact): # (@1) an (optional) fixed standard-name for the coordinate, or None # If None, the coordinate name is copied from the source variable _coordtype_to_gridtype_coordname = { - "latitude": ("latitude_longitude", hh.CF_VALUE_STD_NAME_LAT), - "longitude": ("latitude_longitude", hh.CF_VALUE_STD_NAME_LON), + "latitude": ("latlon", hh.CF_VALUE_STD_NAME_LAT), + "longitude": ("latlon", hh.CF_VALUE_STD_NAME_LON), "rotated_latitude": ( - "rotated_latitude_longitude", + "rotated", hh.CF_VALUE_STD_NAME_GRID_LAT, ), "rotated_longitude": ( - "rotated_latitude_longitude", + "rotated", hh.CF_VALUE_STD_NAME_GRID_LON, ), "projection_x": ("projected", hh.CF_VALUE_STD_NAME_PROJ_X), @@ -251,20 +252,94 @@ def action_build_dimension_coordinate(engine, providescoord_fact): coord_type, var_name = providescoord_fact cf_var = engine.cf_var.cf_group[var_name] rule_name = f"fc_build_coordinate_({coord_type})" - grid_type, coord_name = _coordtype_to_gridtype_coordname[coord_type] - coord_system = None - if grid_type is not None: - # If a type is identified with a grid, use the coordinate system + coord_grid_class, coord_name = _coordtype_to_gridtype_coordname[coord_type] + if coord_grid_class is None: + # Coordinates not identified with a specific grid-type class (latlon, + # rotated or projected) are always built, but can have no coord-system. + coord_system = None # no coord-system can be used + succeed = True + else: + grid_classes = ("latlon", "rotated", "projected") + assert coord_grid_class in grid_classes + # If a coord is of a type identified with a grid, we may have a + # coordinate system (i.e. a valid grid-mapping). # N.B. this requires each grid-type identification to validate the # coord var (e.g. "is_longitude"). # Non-conforming lon/lat/projection coords will be classed as # dim-coords by cf.py, but 'action_provides_coordinate' will give them # a coord-type of 'miscellaneous' : hence, they have no coord-system. coord_system = engine.cube_parts.get("coordinate_system") - hh.build_dimension_coordinate( - engine, cf_var, coord_name=coord_name, coord_system=coord_system - ) + # Translate the specific grid-mapping type to a grid-class + if coord_system is None: + succeed = True + cs_gridclass = None + else: + gridtypes_factlist = engine.fact_list("grid-type") + (gridtypes_fact,) = gridtypes_factlist # only 1 fact + (cs_gridtype,) = gridtypes_fact # fact contains 1 term + # (i.e. one of latlon/rotated/prjected, like coord_grid_class) + if cs_gridtype == "latitude_longitude": + cs_gridclass = "latlon" + elif cs_gridtype == "rotated_latitude_longitude": + cs_gridclass = "rotated" + else: + # Other specific projections + assert cs_gridtype is not None + cs_gridclass = "projected" + + assert cs_gridclass in grid_classes + (None,) + + if coord_grid_class == "latlon": + if cs_gridclass == "latlon": + succeed = True + elif cs_gridclass is None: + succeed = True + rule_name += "(no-cs)" + elif cs_gridclass == "rotated": + # We disallow this case + succeed = False + else: + assert cs_gridclass == "projected" + # succeed, no error, but discards the coord-system + # TODO: could issue a warning in this case ? + succeed = True + coord_system = None + rule_name += "(no-cs : discarded projected cs)" + elif coord_grid_class == "rotated": + # For rotated, we also accept no coord-system, but do *not* accept + # the presence of an unsuitable type. 
+ if cs_gridclass == "rotated": + succeed = True + rule_name += "(rotated)" + elif cs_gridclass is None: + succeed = True + rule_name += "(rotated no-cs)" + elif cs_gridclass == "latlon": + # We allow this, but discard the CS + succeed = False + rule_name += "(FAILED rotated with latlon-cs)" + else: + assert cs_gridclass == "projected" + succeed = True + coord_system = None + rule_name += "(rotated : discarded projected cs)" + elif coord_grid_class == "projected": + # In this case, can *only* build a coord at all if there is a + # coord-system of the correct class (i.e. 'projected'). + succeed = cs_gridclass == "projected" + if not succeed: + rule_name += "(FAILED projected coord with non-projected cs)" + else: + msg = ( + f'Unexpected coord grid-class "{coord_grid_class}" ' + f"for coord {var_name}." + ) + raise ValueError(msg) + if succeed: + hh.build_dimension_coordinate( + engine, cf_var, coord_name=coord_name, coord_system=coord_system + ) return rule_name From 72058b068f8a915cccc3a08ed486d1ed427858f5 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 9 Jun 2021 18:52:50 +0100 Subject: [PATCH 31/53] Actions for remaining 'miscellaneous' behaviour: All tests passing, including pyke == nonpyke checks. --- .../fileformats/_nc_load_rules/actions.py | 88 ++++++++++++++++++- .../load_cube/load_cube__activate/__init__.py | 2 +- 2 files changed, 88 insertions(+), 2 deletions(-) diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index 07bd407940..97141c4e82 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -45,6 +45,7 @@ """ from . import helpers as hh +import iris.fileformats.pp as pp from functools import wraps @@ -381,7 +382,72 @@ def action_build_auxiliary_coordinate(engine, auxcoord_fact): return rule_name +@action_function +def action_ukmo_stash(engine): + rule_name = "fc_attribute_ukmo__um_stash_source" + var = engine.cf_var + attr_name = "ukmo__um_stash_source" + attr_value = getattr(var, attr_name, None) + if attr_value is None: + attr_altname = "um_stash_source" # legacy form + attr_value = getattr(var, attr_altname, None) + if attr_value is None: + rule_name += "(NOT-TRIGGERED)" + else: + # No helper routine : just do it + engine.cube.attributes["STASH"] = pp.STASH.from_msi(attr_value) + + return rule_name + + +@action_function +def action_ukmo_processflags(engine): + rule_name = "fc_attribute_ukmo__process_flags" + var = engine.cf_var + attr_name = "ukmo__process_flags" + attr_value = getattr(var, attr_name, None) + if attr_value is None: + rule_name += "(NOT-TRIGGERED)" + else: + # No helper routine : just do it + flags = [x.replace("_", " ") for x in attr_value.split(" ")] + engine.cube.attributes["ukmo__process_flags"] = tuple(flags) + + return rule_name + + +@action_function +def action_build_cell_measure(engine, cellm_fact): + (var_name,) = cellm_fact + var = engine.cf_var.cf_group.cell_measures[var_name] + hh.build_cell_measures(engine, var) + + +@action_function +def action_build_ancil_var(engine, ancil_fact): + (var_name,) = ancil_fact + var = engine.cf_var.cf_group.ancillary_variables[var_name] + hh.build_ancil_var(engine, var) + + +@action_function +def action_build_label_coordinate(engine, label_fact): + (var_name,) = label_fact + var = engine.cf_var.cf_group.labels[var_name] + hh.build_auxiliary_coordinate(engine, var) + + def run_actions(engine): + """ + Run all actions for a cube. 
+ + This is the top-level "activation" function which runs all the appropriate + rules actions to translate facts and build all the cube elements. + + The specific cube being translated is "engine.cube". + + """ + # default (all cubes) action, always runs action_default(engine) # This should run the default rules. @@ -400,7 +466,7 @@ def run_actions(engine): for dimcoord_fact in dimcoord_facts: action_provides_coordinate(engine, dimcoord_fact) - # build coordinates + # build (dimension) coordinates providescoord_facts = engine.fact_list("provides-coordinate-(oftype)") for providescoord_fact in providescoord_facts: action_build_dimension_coordinate(engine, providescoord_fact) @@ -409,3 +475,23 @@ def run_actions(engine): auxcoord_facts = engine.fact_list("auxiliary_coordinate") for auxcoord_fact in auxcoord_facts: action_build_auxiliary_coordinate(engine, auxcoord_fact) + + # Detect + process and special 'ukmo' attributes + # Run on every cube : they choose themselves whether to trigger. + action_ukmo_stash(engine) + action_ukmo_processflags(engine) + + # cell measures + cellm_facts = engine.fact_list("cell_measure") + for cellm_fact in cellm_facts: + action_build_cell_measure(engine, cellm_fact) + + # ancillary variables + ancil_facts = engine.fact_list("ancillary_variable") + for ancil_fact in ancil_facts: + action_build_ancil_var(engine, ancil_fact) + + # label coords + label_facts = engine.fact_list("label") + for label_fact in label_facts: + action_build_label_coordinate(engine, label_fact) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py index 6946c2819e..c3329209ce 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py @@ -157,7 +157,7 @@ def load_single_cube(engine): # If requested, directly compare the pyke and non-pyke outputs. if self.compare_pyke_nonpyke: # Compare the loaded cubes from both engines. - print("\nPYKE-NONPYKE COMPARE") + # print("\nPYKE-NONPYKE COMPARE") # First zap cube-data, as masked data does not compare well. def unmask_cube(cube): From f8a1bbe381af75bb84707490f89d00eeff24516f Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 9 Jun 2021 23:29:49 +0100 Subject: [PATCH 32/53] Improved comments in actions routines. --- .../fileformats/_nc_load_rules/actions.py | 54 +++++++++++-------- 1 file changed, 32 insertions(+), 22 deletions(-) diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index 97141c4e82..227fea587e 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -27,7 +27,7 @@ other rule-type logic. Each 'action' function can replace several similar 'rules'. -E.G. 'action_provides_grid_mapping' replaces all 'fc_provides_grid+mapping_'. +E.G. 'action_provides_grid_mapping' replaces all 'fc_provides_grid_mapping_'. 
To aid debug, each returns a 'rule_name' string, indicating which original rule this particular action call is emulating : In some cases, this may include a textual note that this rule 'failed', aka "did not trigger", which would not be @@ -82,6 +82,7 @@ def inner(engine, *args, **kwargs): @action_function def action_default(engine): + """Standard operations for every cube.""" hh.build_cube_metadata(engine) @@ -90,7 +91,7 @@ def action_default(engine): # routines: # (@0) a validity-checker (or None) # (@1) a coord-system builder function. -_grid_types_to_checker_builder = { +_GRIDTYPE_CHECKER_AND_BUILDER = { hh.CF_GRID_MAPPING_LAT_LON: (None, hh.build_coordinate_system), hh.CF_GRID_MAPPING_ROTATED_LAT_LON: ( None, @@ -133,29 +134,27 @@ def action_default(engine): @action_function def action_provides_grid_mapping(engine, gridmapping_fact): + """Convert a CFGridMappingVariable into a cube coord-system.""" (var_name,) = gridmapping_fact rule_name = "fc_provides_grid_mapping" cf_var = engine.cf_var.cf_group[var_name] grid_mapping_type = getattr(cf_var, hh.CF_ATTR_GRID_MAPPING_NAME, None) + succeed = True if grid_mapping_type is None: succeed = False rule_name += " --FAILED(no grid-mapping attr)" else: grid_mapping_type = grid_mapping_type.lower() + if succeed: - if grid_mapping_type in _grid_types_to_checker_builder: - checker, builder = _grid_types_to_checker_builder[ - grid_mapping_type - ] + if grid_mapping_type in _GRIDTYPE_CHECKER_AND_BUILDER: + checker, builder = _GRIDTYPE_CHECKER_AND_BUILDER[grid_mapping_type] rule_name += f"_({grid_mapping_type})" else: succeed = False rule_name += f" --FAILED(unhandled type {grid_mapping_type})" - # We DON'T call this, as we already identified the type in the call. - # if succeed and not is_grid_mapping(engine, var_name, grid_mapping_type): - # succeed = False - # rule_name += f' --(FAILED is_grid_mapping)' + if succeed: if checker is not None and not checker(engine, var_name): succeed = False @@ -172,6 +171,7 @@ def action_provides_grid_mapping(engine, gridmapping_fact): f" --(FAILED overwrite coord-sytem " f"{old_gridtype} with {grid_mapping_type})" ) + if succeed: engine.cube_parts["coordinate_system"] = coordinate_system engine.add_fact("grid-type", (grid_mapping_type,)) @@ -181,6 +181,7 @@ def action_provides_grid_mapping(engine, gridmapping_fact): @action_function def action_provides_coordinate(engine, dimcoord_fact): + """Identify the coordinate 'type' of a CFCoordinateVariable.""" (var_name,) = dimcoord_fact # Identify the "type" of a coordinate variable @@ -224,12 +225,12 @@ def action_provides_coordinate(engine, dimcoord_fact): # Lookup table used by 'action_build_dimension_coordinate'. # Maps each supported coordinate-type name (a rules-internal concept) to a pair # of information values : -# (@0) the CF grid_mapping_name (or None) +# (@0) A grid "type", one of latlon/rotated/projected (or None) # If set, the cube should have a coord-system, which is set on the # resulting coordinate. If None, the coord has no coord_system. 
# (@1) an (optional) fixed standard-name for the coordinate, or None # If None, the coordinate name is copied from the source variable -_coordtype_to_gridtype_coordname = { +_COORDTYPE_GRIDTYPES_AND_COORDNAMES = { "latitude": ("latlon", hh.CF_VALUE_STD_NAME_LAT), "longitude": ("latlon", hh.CF_VALUE_STD_NAME_LON), "rotated_latitude": ( @@ -250,10 +251,13 @@ def action_provides_coordinate(engine, dimcoord_fact): @action_function def action_build_dimension_coordinate(engine, providescoord_fact): + """Convert a CFCoordinateVariable into a cube dim-coord.""" coord_type, var_name = providescoord_fact cf_var = engine.cf_var.cf_group[var_name] rule_name = f"fc_build_coordinate_({coord_type})" - coord_grid_class, coord_name = _coordtype_to_gridtype_coordname[coord_type] + coord_grid_class, coord_name = _COORDTYPE_GRIDTYPES_AND_COORDNAMES[ + coord_type + ] if coord_grid_class is None: # Coordinates not identified with a specific grid-type class (latlon, # rotated or projected) are always built, but can have no coord-system. @@ -275,10 +279,11 @@ def action_build_dimension_coordinate(engine, providescoord_fact): succeed = True cs_gridclass = None else: + # Get a grid-class from the grid-type + # i.e. one of latlon/rotated/projected, as for coord_grid_class. gridtypes_factlist = engine.fact_list("grid-type") (gridtypes_fact,) = gridtypes_factlist # only 1 fact (cs_gridtype,) = gridtypes_fact # fact contains 1 term - # (i.e. one of latlon/rotated/prjected, like coord_grid_class) if cs_gridtype == "latitude_longitude": cs_gridclass = "latlon" elif cs_gridtype == "rotated_latitude_longitude": @@ -299,6 +304,7 @@ def action_build_dimension_coordinate(engine, providescoord_fact): elif cs_gridclass == "rotated": # We disallow this case succeed = False + rule_name += "(FAILED : latlon coord with rotated cs)" else: assert cs_gridclass == "projected" # succeed, no error, but discards the coord-system @@ -307,8 +313,6 @@ def action_build_dimension_coordinate(engine, providescoord_fact): coord_system = None rule_name += "(no-cs : discarded projected cs)" elif coord_grid_class == "rotated": - # For rotated, we also accept no coord-system, but do *not* accept - # the presence of an unsuitable type. if cs_gridclass == "rotated": succeed = True rule_name += "(rotated)" @@ -316,14 +320,14 @@ def action_build_dimension_coordinate(engine, providescoord_fact): succeed = True rule_name += "(rotated no-cs)" elif cs_gridclass == "latlon": - # We allow this, but discard the CS + # We disallow this case succeed = False - rule_name += "(FAILED rotated with latlon-cs)" + rule_name += "(FAILED rotated coord with latlon cs)" else: assert cs_gridclass == "projected" succeed = True coord_system = None - rule_name += "(rotated : discarded projected cs)" + rule_name += "(rotated no-cs : discarded projected cs)" elif coord_grid_class == "projected": # In this case, can *only* build a coord at all if there is a # coord-system of the correct class (i.e. 'projected'). @@ -346,12 +350,12 @@ def action_build_dimension_coordinate(engine, providescoord_fact): @action_function def action_build_auxiliary_coordinate(engine, auxcoord_fact): + """Convert a CFAuxiliaryCoordinateVariable into a cube aux-coord.""" (var_name,) = auxcoord_fact rule_name = "fc_build_auxiliary_coordinate" - # FOR NOW: attempt to identify type - # TODO: eventually remove much of this, which only affects rule_name. 
- # (but could possibly retain for future debugging purposes) + # Identify any known coord "type" : latitude/longitude/time/time_period + # If latitude/longitude, this sets the standard_name of the built AuxCoord coord_type = "" # unidentified : can be OK coord_name = None if hh.is_time(engine, var_name): @@ -384,6 +388,7 @@ def action_build_auxiliary_coordinate(engine, auxcoord_fact): @action_function def action_ukmo_stash(engine): + """Convert 'ukmo stash' cf property into a cube attribute.""" rule_name = "fc_attribute_ukmo__um_stash_source" var = engine.cf_var attr_name = "ukmo__um_stash_source" @@ -402,6 +407,7 @@ def action_ukmo_stash(engine): @action_function def action_ukmo_processflags(engine): + """Convert 'ukmo process flags' cf property into a cube attribute.""" rule_name = "fc_attribute_ukmo__process_flags" var = engine.cf_var attr_name = "ukmo__process_flags" @@ -418,6 +424,7 @@ def action_ukmo_processflags(engine): @action_function def action_build_cell_measure(engine, cellm_fact): + """Convert a CFCellMeasureVariable into a cube cell-measure.""" (var_name,) = cellm_fact var = engine.cf_var.cf_group.cell_measures[var_name] hh.build_cell_measures(engine, var) @@ -425,6 +432,7 @@ def action_build_cell_measure(engine, cellm_fact): @action_function def action_build_ancil_var(engine, ancil_fact): + """Convert a CFAncillaryVariable into a cube ancil-var.""" (var_name,) = ancil_fact var = engine.cf_var.cf_group.ancillary_variables[var_name] hh.build_ancil_var(engine, var) @@ -432,6 +440,7 @@ def action_build_ancil_var(engine, ancil_fact): @action_function def action_build_label_coordinate(engine, label_fact): + """Convert a CFLabelVariable into a cube string-type aux-coord.""" (var_name,) = label_fact var = engine.cf_var.cf_group.labels[var_name] hh.build_auxiliary_coordinate(engine, var) @@ -467,6 +476,7 @@ def run_actions(engine): action_provides_coordinate(engine, dimcoord_fact) # build (dimension) coordinates + # The 'provides' step and the grid-mapping must have already been done. providescoord_facts = engine.fact_list("provides-coordinate-(oftype)") for providescoord_fact in providescoord_facts: action_build_dimension_coordinate(engine, providescoord_fact) From 6eaf756c1fa8cd575f0b477ff8d2e47c9bb76af3 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Thu, 10 Jun 2021 17:54:20 +0100 Subject: [PATCH 33/53] Added actions for formulae (aka hybrid coords, factories). --- .../fileformats/_nc_load_rules/actions.py | 56 ++++++++++++++++++- 1 file changed, 55 insertions(+), 1 deletion(-) diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index 227fea587e..568e01f84d 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -45,9 +45,12 @@ """ from . import helpers as hh -import iris.fileformats.pp as pp + from functools import wraps +import iris.fileformats.pp as pp +import iris.fileformats.cf + def _default_rulenamesfunc(func_name): # A simple default function to deduce the rules-name from an action-name. @@ -446,6 +449,46 @@ def action_build_label_coordinate(engine, label_fact): hh.build_auxiliary_coordinate(engine, var) +@action_function +def action_formula_type(engine, formula_root_fact): + """Register a CFVariable as a formula root.""" + rule_name = "fc_formula_type" + (var_name,) = formula_root_fact + cf_var = engine.cf_var.cf_group[var_name] + # var.standard_name is a formula type (or we should never get here). 
+ formula_type = getattr(cf_var, "standard_name", None) + succeed = True + if formula_type not in iris.fileformats.cf.reference_terms: + succeed = False + rule_name += f"(FAILED - unrecognised formula type = {formula_type!r})" + if succeed: + # Check we don't already have one. + existing_type = engine.requires.get("formula_type") + if existing_type: + succeed = False + rule_name += ( + f"(FAILED - new formula type ={formula_type!r} " + f"collided with existing one ={existing_type!r}.)" + ) + if succeed: + rule_name += f"_{formula_type}" + # Set 'requires' info for iris.fileformats.netcdf._load_aux_factory. + engine.requires["formula_type"] = formula_type + + return rule_name + + +@action_function +def action_formula_term(engine, formula_term_fact): + """Register a CFVariable as a formula term.""" + # Must run AFTER formula root identification. + (termvar_name, rootvar_name, term_name) = formula_term_fact + # The rootname is implicit : have only one per cube + # TODO: change when we adopt cf-1.7 advanced grid-mping syntax + engine.requires.setdefault("formula_terms", {})[term_name] = termvar_name + rule_name = f"fc_formula_term({term_name})" + + def run_actions(engine): """ Run all actions for a cube. @@ -505,3 +548,14 @@ def run_actions(engine): label_facts = engine.fact_list("label") for label_fact in label_facts: action_build_label_coordinate(engine, label_fact) + + # formula root variables + formula_root_facts = engine.fact_list("formula_root") + for root_fact in formula_root_facts: + action_formula_type(engine, root_fact) + + # formula terms + # The 'formula_root's must have already been done. + formula_term_facts = engine.fact_list("formula_term") + for term_fact in formula_term_facts: + action_formula_term(engine, term_fact) From 09df997ac3a4508b36fcdd6de0d0efe10853535c Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Thu, 10 Jun 2021 21:56:23 +0100 Subject: [PATCH 34/53] Tiny fix. --- lib/iris/fileformats/_nc_load_rules/actions.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index 568e01f84d..4676389cb7 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -487,6 +487,7 @@ def action_formula_term(engine, formula_term_fact): # TODO: change when we adopt cf-1.7 advanced grid-mping syntax engine.requires.setdefault("formula_terms", {})[term_name] = termvar_name rule_name = f"fc_formula_term({term_name})" + return rule_name def run_actions(engine): From be9c0f1671854e2835e8beb104ec779dd6634263 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 15 Jun 2021 15:56:21 +0100 Subject: [PATCH 35/53] Small review changes. --- lib/iris/fileformats/_nc_load_rules/actions.py | 4 ++-- .../load_cube/load_cube__activate/__init__.py | 13 ++++++++----- 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index 4676389cb7..0f80866412 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -455,7 +455,7 @@ def action_formula_type(engine, formula_root_fact): rule_name = "fc_formula_type" (var_name,) = formula_root_fact cf_var = engine.cf_var.cf_group[var_name] - # var.standard_name is a formula type (or we should never get here). + # cf_var.standard_name is a formula type (or we should never get here). 
formula_type = getattr(cf_var, "standard_name", None) succeed = True if formula_type not in iris.fileformats.cf.reference_terms: @@ -484,7 +484,7 @@ def action_formula_term(engine, formula_term_fact): # Must run AFTER formula root identification. (termvar_name, rootvar_name, term_name) = formula_term_fact # The rootname is implicit : have only one per cube - # TODO: change when we adopt cf-1.7 advanced grid-mping syntax + # TODO: change when we adopt cf-1.7 advanced grid-mapping syntax engine.requires.setdefault("formula_terms", {})[term_name] = termvar_name rule_name = f"fc_formula_term({term_name})" return rule_name diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py index c3329209ce..705496e397 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py @@ -131,7 +131,7 @@ def load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): iris.fileformats.netcdf.DEBUG = self.debug # Call the main translation function to load a single cube. - def load_single_cube(engine): + def translate_cube(engine): # _load_cube establishes per-cube facts, activates rules and # produces an actual cube. cube = _load_cube(engine, cf, cf_var, nc_path) @@ -150,18 +150,21 @@ def load_single_cube(engine): return cube if do_pyke: - pyke_cube = load_single_cube(pyke_engine) + pyke_cube = translate_cube(pyke_engine) if do_nonpyke: - nonpyke_cube = load_single_cube(nonpyke_engine) + nonpyke_cube = translate_cube(nonpyke_engine) # If requested, directly compare the pyke and non-pyke outputs. if self.compare_pyke_nonpyke: # Compare the loaded cubes from both engines. # print("\nPYKE-NONPYKE COMPARE") - # First zap cube-data, as masked data does not compare well. + # Make a duplicate cube with un-masked cube data, as masked data + # does not compare well (i.e. cube1 == cube2 may yield 'masked' + # instead of a boolean). def unmask_cube(cube): - # preserve the original, we're going to realise.. + # Make a copy, so that we can realise the data without + # modifying the original cube. cube = cube.copy() if isinstance(cube.data, np.ma.MaskedArray): cube.data = cube.data.filled(0) From 0c5a5f67e50eacadafc787a6ee141ac00bfa63de Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 22 Jun 2021 11:08:05 +0100 Subject: [PATCH 36/53] Added basic tests for Engine class. --- .../netcdf/load_cube/test_engine.py | 106 ++++++++++++++++++ 1 file changed, 106 insertions(+) create mode 100644 lib/iris/tests/unit/fileformats/netcdf/load_cube/test_engine.py diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test_engine.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test_engine.py new file mode 100644 index 0000000000..a385640e01 --- /dev/null +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test_engine.py @@ -0,0 +1,106 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the :mod:`iris.fileformats._nc_load_rules.engine` module. 
+ +TODO: relocate under iris/tests/unit/fileformats/nc_load_rules + +""" +import iris.tests as tests + +from unittest import mock + +from iris.fileformats._nc_load_rules.engine import Engine, FactEntity + + +class Test_Engine(tests.IrisTest): + def setUp(self): + self.empty_engine = Engine() + engine = Engine() + engine.add_fact("this", ("that", "other")) + self.nonempty_engine = engine + + def test__init(self): + # Check that init creates an empty Engine. + engine = Engine() + self.assertIsInstance(engine, Engine) + self.assertIsInstance(engine.facts, FactEntity) + self.assertEqual(list(engine.facts.entity_lists.keys()), []) + + def test_reset(self): + # Check that calling reset() causes a non-empty engine to be emptied. + engine = self.nonempty_engine + fact_names = list(engine.facts.entity_lists.keys()) + self.assertNotEqual(len(fact_names), 0) + engine.reset() + fact_names = list(engine.facts.entity_lists.keys()) + self.assertEqual(len(fact_names), 0) + + def test_activate(self): + # Check that calling engine.activate() --> actions.run_actions(engine) + engine = self.empty_engine + target = "iris.fileformats._nc_load_rules.engine.run_actions" + run_call = self.patch(target) + engine.activate() + self.assertEqual(run_call.call_args_list, [mock.call(engine)]) + + def test_add_case_specific_fact__newname(self): + # Adding a new fact to a new fact-name records as expected. + engine = self.nonempty_engine + engine.add_case_specific_fact("junkname", "new_fact", ("a1", "a2")) + self.assertEqual(engine.fact_list("new_fact"), [("a1", "a2")]) + + def test_add_case_specific_fact__existingname(self): + # Adding a new fact to an existing fact-name records as expected. + engine = self.nonempty_engine + name = "this" + self.assertEqual(engine.fact_list(name), [("that", "other")]) + engine.add_case_specific_fact("junkname", name, ("yetanother",)) + self.assertEqual( + engine.fact_list(name), [("that", "other"), ("yetanother",)] + ) + + def test_add_case_specific_fact__emptyargs(self): + # Check that empty args work ok, and will create a new fact. + engine = self.empty_engine + engine.add_case_specific_fact("junkname", "new_fact", ()) + self.assertIn("new_fact", engine.facts.entity_lists) + self.assertEqual(engine.fact_list("new_fact"), [()]) + + def test_add_fact(self): + # Check that 'add_fact' is equivalent to (short for) a call to + # 'add_case_specific_fact'. + engine = self.empty_engine + target = ( + "iris.fileformats._nc_load_rules.engine.Engine" + ".add_case_specific_fact" + ) + acsf_call = self.patch(target) + engine.add_fact("extra", ()) + self.assertEqual(acsf_call.call_count, 1) + self.assertEqual( + acsf_call.call_args_list, + [mock.call(kb_name="", fact_name="extra", fact_arglist=())], + ) + + def test_get_kb(self): + # Check that this stub just returns the facts database. + engine = self.nonempty_engine + kb = engine.get_kb() + self.assertIsInstance(kb, FactEntity) + self.assertIs(kb, engine.facts) + + def test_fact_list__existing(self): + self.assertEqual( + self.nonempty_engine.fact_list("this"), [("that", "other")] + ) + + def test_fact_list__nonexisting(self): + self.assertEqual(self.empty_engine.fact_list("odd-unknown"), []) + + +if __name__ == "__main__": + tests.main() From b55a925396f6b115d629c83547c727059aa4aa2f Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 22 Jun 2021 15:50:04 +0100 Subject: [PATCH 37/53] Raise actual warning for unrecognised formulae. 
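
Previously a formula root variable with an unrecognised formula type was just
discarded without comment.  Now the action also emits a UserWarning, so the
problem is visible to the user.  A minimal sketch of the intended effect,
assuming no warning filters suppress it (the filename below is hypothetical):

    import warnings

    import iris

    with warnings.catch_warnings(record=True) as log:
        warnings.simplefilter("always")
        # Hypothetical file whose formula root has an unknown standard_name.
        cubes = iris.load("hybrid_unknown_formula.nc")
    assert any("unrecognised type" in str(w.message) for w in log)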
--- lib/iris/fileformats/_nc_load_rules/actions.py | 3 +++ .../load_cube/load_cube__activate/test__hybrid_formulae.py | 4 +++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index 0f80866412..0e77fb52fe 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -47,6 +47,7 @@ from . import helpers as hh from functools import wraps +import warnings import iris.fileformats.pp as pp import iris.fileformats.cf @@ -461,6 +462,8 @@ def action_formula_type(engine, formula_root_fact): if formula_type not in iris.fileformats.cf.reference_terms: succeed = False rule_name += f"(FAILED - unrecognised formula type = {formula_type!r})" + msg = f"Ignored formula of unrecognised type: {formula_type!r}." + warnings.warn(msg) if succeed: # Check we don't already have one. existing_type = engine.requires.get("formula_type") diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__hybrid_formulae.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__hybrid_formulae.py index c1a325925f..3e7d7fe9cd 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__hybrid_formulae.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__hybrid_formulae.py @@ -163,7 +163,9 @@ def test_unrecognised_verticaltype(self): # 005 : fc_formula_terms # 006 : fc_formula_terms result = self.run_testcase( - formula_root_name="unknown", term_names=["a", "b"] + formula_root_name="unknown", + term_names=["a", "b"], + warning="Ignored formula of unrecognised type: 'unknown'.", ) # Check that it picks up the terms, but *not* the factory root coord, # which is simply discarded. From 0da7ef361e1d61c0a3a5c57864f425933a1a327e Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 22 Jun 2021 16:04:22 +0100 Subject: [PATCH 38/53] Remove unreachable warning code. --- lib/iris/fileformats/_nc_load_rules/actions.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index 0e77fb52fe..8225e2de43 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -339,11 +339,8 @@ def action_build_dimension_coordinate(engine, providescoord_fact): if not succeed: rule_name += "(FAILED projected coord with non-projected cs)" else: - msg = ( - f'Unexpected coord grid-class "{coord_grid_class}" ' - f"for coord {var_name}." - ) - raise ValueError(msg) + # Just FYI : literally not possible, as we already asserted this. + assert coord_grid_class in grid_classes if succeed: hh.build_dimension_coordinate( From 4bba4007b3916c16393d15b787efc5891287d605 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 22 Jun 2021 16:20:55 +0100 Subject: [PATCH 39/53] Remove unreachable rule-fail code. 
--- lib/iris/fileformats/_nc_load_rules/actions.py | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index 8225e2de43..8198e68261 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -166,18 +166,12 @@ def action_provides_grid_mapping(engine, gridmapping_fact): if succeed: coordinate_system = builder(engine, cf_var) + engine.cube_parts["coordinate_system"] = coordinate_system + # Check there is not an existing one. - old_gridtype_fact = engine.fact_list("grid-type") - if old_gridtype_fact: - (old_gridtype,) = old_gridtype_fact - succeed = False - rule_name += ( - f" --(FAILED overwrite coord-sytem " - f"{old_gridtype} with {grid_mapping_type})" - ) + # ATM this is guaranteed by the caller, "run_actions". + assert engine.fact_list("grid-type") == [] - if succeed: - engine.cube_parts["coordinate_system"] = coordinate_system engine.add_fact("grid-type", (grid_mapping_type,)) return rule_name From eb46d5189737b446470d8b4cbbf63712d1859763 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 22 Jun 2021 19:27:36 +0100 Subject: [PATCH 40/53] Testcase for multiple hybrid coordinates. --- .../fileformats/_nc_load_rules/actions.py | 13 ++-- .../test__hybrid_formulae.py | 72 +++++++++++++++++-- 2 files changed, 75 insertions(+), 10 deletions(-) diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index 8198e68261..76fea489d3 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -459,12 +459,17 @@ def action_formula_type(engine, formula_root_fact): # Check we don't already have one. existing_type = engine.requires.get("formula_type") if existing_type: + # NOTE: in this case, for now, we will accept the last appearing, + # which matches the older behaviour. + # TODO: this needs resolving, somehow. succeed = False - rule_name += ( - f"(FAILED - new formula type ={formula_type!r} " - f"collided with existing one ={existing_type!r}.)" + msg = ( + "Omitting factories for some hybrid coordinates, as multiple " + "hybrid coordinates on a single variable are not supported: " + f"Formula of type ={formula_type!r} " + f"overrides another of type ={existing_type!r}.)" ) - if succeed: + warnings.warn(msg) rule_name += f"_{formula_type}" # Set 'requires' info for iris.fileformats.netcdf._load_aux_factory. 
engine.requires["formula_type"] = formula_type diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__hybrid_formulae.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__hybrid_formulae.py index 3e7d7fe9cd..bcbd97290b 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__hybrid_formulae.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__hybrid_formulae.py @@ -19,7 +19,9 @@ class Mixin__formulae_tests(Mixin__nc_load_actions): - def _make_testcase_cdl(self, formula_root_name=None, term_names=None): + def _make_testcase_cdl( + self, formula_root_name=None, term_names=None, extra_formula_type=None + ): """Construct a testcase CDL for data with hybrid vertical coords.""" if formula_root_name is None: formula_root_name = "atmosphere_hybrid_height_coordinate" @@ -29,22 +31,54 @@ def _make_testcase_cdl(self, formula_root_name=None, term_names=None): # unsupported type : just make something up term_names = ["term1"] - terms_string = "" - phenom_coord_names = ["vert"] # always include the root variable + # Arrange to create additional term variables for an 'extra' hybrid + # formula, if requested. + if extra_formula_type is None: + term_names_extra = [] + phenom_coord_names = ["vert"] # always include the root variable + else: + phenom_coord_names = ["vert", "vert_2"] # two formula coords + term_names_extra = hh.CF_COORD_VERTICAL.get(extra_formula_type) + + # Build strings to define term variables. formula_term_strings = [] - for term_name in term_names: + extra_formula_term_strings = [] + terms_string = "" + for term_name in term_names + term_names_extra: term_varname = "v_" + term_name + # Include in the phenom coordinates list. phenom_coord_names.append(term_varname) - formula_term_strings.append(f"{term_name}: {term_varname}") + term_string = f"{term_name}: {term_varname}" + if term_name in term_names: + # Include in the 'main' terms list. + formula_term_strings.append(term_string) + else: + # Include in the 'extra' terms list. + extra_formula_term_strings.append(term_string) terms_string += f""" double {term_varname}(h) ; {term_varname}:long_name = "{term_name}_long_name" ; {term_varname}:units = "m" ; """ - # remove the extra initial space from the formula terms string + # Construct the reference strings. phenom_coords_string = " ".join(phenom_coord_names) formula_terms_string = " ".join(formula_term_strings) + extra_formula_terms_string = " ".join(extra_formula_term_strings) + + # Construct the 'extra' hybrid coord if requested. + if extra_formula_type is None: + extra_formula_string = "" + else: + # Create the lines to add an 'extra' formula. + # For now, put this on the same dim : makes no difference. + extra_formula_string = f""" + double vert_2(h) ; + vert_2:standard_name = "{extra_formula_type}" ; + vert_2:units = "m" ; + vert_2:formula_terms = "{extra_formula_terms_string}" ; +""" + # Create the main result string. cdl_str = f""" netcdf test {{ @@ -61,6 +95,7 @@ def _make_testcase_cdl(self, formula_root_name=None, term_names=None): vert:units = "m" ; vert:formula_terms = "{formula_terms_string}" ; {terms_string} +{extra_formula_string} }} """ return cdl_str @@ -171,6 +206,31 @@ def test_unrecognised_verticaltype(self): # which is simply discarded. self.check_result(result, factory_type=None, formula_terms=["a", "b"]) + def test_two_formulae(self): + # Construct an example with TWO hybrid coords. + # This is not errored, but we don't correctly support it. 
+ # + # NOTE: the original Pyke implementation does not detect this problem + # By design, the new mechanism does + will raise a warning. + warning = ( + "Omitting factories for some hybrid coordinates.*" + "multiple hybrid coordinates.* not supported" + ) + + extra_type = "ocean_sigma_coordinate" + result = self.run_testcase( + extra_formula_type=extra_type, warning=warning + ) + # NOTE: FOR NOW, check expected behaviour : only one factory will be + # built, but there are coordinates (terms) for both types. + # TODO: this is a bug and needs fixing : translation should handle + # multiple hybrid coordinates in a sensible way. + self.check_result( + result, + factory_type=extra_type, + formula_terms=["a", "b", "depth", "eta", "orog", "sigma"], + ) + # Add in tests methods to exercise each (supported) vertical coordinate type # individually. From ce1e438b03420d2e9cdf3d1a9d0bc88d4a000c77 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 22 Jun 2021 19:38:52 +0100 Subject: [PATCH 41/53] Fix rotated-aux-latitude test. --- .../load_cube/load_cube__activate/test__grid_mappings.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py index 5a11a2cc88..e40881aefd 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py @@ -846,9 +846,9 @@ def test_aux_lat_rotated(self): # 005 : fc_build_coordinate_latitude_rotated result = self.run_testcase( mapping_type_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON, - xco_is_dim=False, + yco_is_dim=False, ) - self.check_result(result, xco_is_aux=True, xco_no_cs=True) + self.check_result(result, yco_is_aux=True, yco_no_cs=True) class Test__aux_latlons__pyke_rules(Mixin__aux_latlons, tests.IrisTest): From 2fdcdf98e88b7f192266f2e796fbe25c7a45e688 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 23 Jun 2021 14:15:13 +0100 Subject: [PATCH 42/53] Isort fixes. --- lib/iris/fileformats/_nc_load_rules/actions.py | 6 +++--- lib/iris/fileformats/_nc_load_rules/helpers.py | 7 +++---- .../netcdf/load_cube/load_cube__activate/__init__.py | 2 +- .../load_cube/load_cube__activate/test__additional.py | 2 -- .../load_cube/load_cube__activate/test__grid_mappings.py | 4 +--- .../load_cube/load_cube__activate/test__hybrid_formulae.py | 3 +-- .../load_cube/load_cube__activate/test__miscellaneous.py | 6 ++---- .../load_cube/load_cube__activate/test__time_coords.py | 5 +---- .../tests/unit/fileformats/netcdf/load_cube/test_engine.py | 3 +-- 9 files changed, 13 insertions(+), 25 deletions(-) diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index 76fea489d3..8e4ad7afb6 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -44,13 +44,13 @@ """ -from . import helpers as hh - from functools import wraps import warnings -import iris.fileformats.pp as pp import iris.fileformats.cf +import iris.fileformats.pp as pp + +from . 
import helpers as hh def _default_rulenamesfunc(func_name): diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py index ce7a194b35..458e130ac3 100644 --- a/lib/iris/fileformats/_nc_load_rules/helpers.py +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -20,20 +20,19 @@ import iris.aux_factory from iris.common.mixin import _get_valid_standard_name -import iris.coords import iris.coord_systems +import iris.coords +import iris.exceptions import iris.fileformats.cf as cf import iris.fileformats.netcdf from iris.fileformats.netcdf import ( + UnknownCellMethodWarning, _get_cf_var_data, parse_cell_methods, - UnknownCellMethodWarning, ) -import iris.exceptions import iris.std_names import iris.util - # # UD Units Constants (based on Unidata udunits.dat definition file) # diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py index 705496e397..a85baaf856 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py @@ -21,10 +21,10 @@ import numpy as np +import iris.fileformats._nc_load_rules.engine from iris.fileformats.cf import CFReader import iris.fileformats.netcdf from iris.fileformats.netcdf import _load_cube -import iris.fileformats._nc_load_rules.engine """ Notes on testing method. diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__additional.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__additional.py index 957c736501..8340147bb1 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__additional.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__additional.py @@ -15,8 +15,6 @@ """ import iris.tests as tests - - from iris.tests.unit.fileformats.netcdf.load_cube.load_cube__activate.test__grid_mappings import ( Mixin__grid_mapping, ) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py index e40881aefd..e054baa431 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py @@ -10,11 +10,9 @@ Here, *specifically* testcases relating to grid-mappings and dim-coords. """ -import iris.tests as tests - import iris.coord_systems as ics import iris.fileformats._nc_load_rules.helpers as hh - +import iris.tests as tests from iris.tests.unit.fileformats.netcdf.load_cube.load_cube__activate import ( Mixin__nc_load_actions, ) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__hybrid_formulae.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__hybrid_formulae.py index bcbd97290b..60e3253707 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__hybrid_formulae.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__hybrid_formulae.py @@ -10,9 +10,8 @@ Test rules activation relating to hybrid vertical coordinates. 
""" -import iris.tests as tests - import iris.fileformats._nc_load_rules.helpers as hh +import iris.tests as tests from iris.tests.unit.fileformats.netcdf.load_cube.load_cube__activate import ( Mixin__nc_load_actions, ) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__miscellaneous.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__miscellaneous.py index d41b19e108..a35a469e94 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__miscellaneous.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__miscellaneous.py @@ -14,11 +14,9 @@ * ancillary variables """ -import iris.tests as tests - -from iris.coords import AuxCoord, CellMeasure, AncillaryVariable +from iris.coords import AncillaryVariable, AuxCoord, CellMeasure from iris.fileformats.pp import STASH - +import iris.tests as tests from iris.tests.unit.fileformats.netcdf.load_cube.load_cube__activate import ( Mixin__nc_load_actions, ) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py index fa010f446d..c143d964ec 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py @@ -10,11 +10,8 @@ Tests for rules activation relating to 'time' and 'time_period' coords. """ -import iris.tests as tests - from iris.coords import AuxCoord, DimCoord - - +import iris.tests as tests from iris.tests.unit.fileformats.netcdf.load_cube.load_cube__activate import ( Mixin__nc_load_actions, ) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test_engine.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test_engine.py index a385640e01..efc1f7e287 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test_engine.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test_engine.py @@ -9,11 +9,10 @@ TODO: relocate under iris/tests/unit/fileformats/nc_load_rules """ -import iris.tests as tests - from unittest import mock from iris.fileformats._nc_load_rules.engine import Engine, FactEntity +import iris.tests as tests class Test_Engine(tests.IrisTest): From f985b0c23b754cf44d3c341d3026ad20f4b32d1d Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 15 Jun 2021 19:45:21 +0100 Subject: [PATCH 43/53] Remove pyke and simplify testing. 
--- .gitignore | 1 - MANIFEST.in | 2 - asv.conf.json | 1 - docs/src/developers_guide/release.rst | 2 +- docs/src/installing.rst | 2 +- .../fileformats/_nc_load_rules/__init__.py | 2 +- .../fileformats/_nc_load_rules/actions.py | 4 - lib/iris/fileformats/_nc_load_rules/engine.py | 85 +- .../fileformats/_pyke_rules/fc_rules_cf.krb | 2355 ----------------- lib/iris/fileformats/netcdf.py | 137 +- lib/iris/tests/test_coding_standards.py | 1 - lib/iris/tests/test_netcdf.py | 12 +- .../__init__.py | 3 +- .../actions}/__init__.py | 125 +- .../actions}/test__grid_mappings.py | 69 +- .../actions}/test__hybrid_formulae.py | 30 +- .../actions}/test__miscellaneous.py | 35 +- .../actions}/test__time_coords.py | 36 +- .../nc_load_rules/engine}/__init__.py | 5 + .../engine}/test_engine.py | 10 +- .../helpers}/__init__.py | 6 +- ...ild_albers_equal_area_coordinate_system.py | 28 +- .../test_build_auxiliary_coordinate.py | 168 +- .../helpers}/test_build_cube_metadata.py | 64 +- .../test_build_dimension_coordinate.py | 262 +- ...t_build_geostationary_coordinate_system.py | 38 +- ..._azimuthal_equal_area_coordinate_system.py | 31 +- ...ild_lambert_conformal_coordinate_system.py | 28 +- .../test_build_mercator_coordinate_system.py | 37 +- ...t_build_stereographic_coordinate_system.py | 30 +- ...d_transverse_mercator_coordinate_system.py | 38 +- .../test_build_verticalp_coordinate_system.py | 45 +- .../helpers}/test_get_attr_units.py | 18 +- .../helpers}/test_get_cf_bounds_var.py | 17 +- .../helpers}/test_get_names.py | 185 +- .../test_has_supported_mercator_parameters.py | 48 +- ..._has_supported_stereographic_parameters.py | 26 +- .../helpers}/test_reorder_bounds_data.py | 29 +- .../tests/unit/fileformats/netcdf/__init__.py | 2 +- .../fileformats/netcdf/load_cube/__init__.py | 6 - .../load_cube__activate/test__additional.py | 55 - .../netcdf/{load_cube => }/test__load_cube.py | 0 .../compiled_krb/fc_rules_cf_fc/__init__.py | 6 - pyproject.toml | 2 - requirements/ci/nox.lock/py37-linux-64.lock | 232 -- requirements/ci/nox.lock/py38-linux-64.lock | 226 -- requirements/ci/py37.yml | 1 - requirements/ci/py38.yml | 1 - setup.cfg | 1 - setup.py | 56 +- 50 files changed, 853 insertions(+), 3750 deletions(-) delete mode 100644 lib/iris/fileformats/_pyke_rules/fc_rules_cf.krb rename lib/iris/tests/unit/fileformats/{pyke_rules/compiled_krb => nc_load_rules}/__init__.py (72%) rename lib/iris/tests/unit/fileformats/{netcdf/load_cube/load_cube__activate => nc_load_rules/actions}/__init__.py (50%) rename lib/iris/tests/unit/fileformats/{netcdf/load_cube/load_cube__activate => nc_load_rules/actions}/test__grid_mappings.py (96%) rename lib/iris/tests/unit/fileformats/{netcdf/load_cube/load_cube__activate => nc_load_rules/actions}/test__hybrid_formulae.py (96%) rename lib/iris/tests/unit/fileformats/{netcdf/load_cube/load_cube__activate => nc_load_rules/actions}/test__miscellaneous.py (92%) rename lib/iris/tests/unit/fileformats/{netcdf/load_cube/load_cube__activate => nc_load_rules/actions}/test__time_coords.py (97%) rename lib/iris/{fileformats/_pyke_rules => tests/unit/fileformats/nc_load_rules/engine}/__init__.py (68%) rename lib/iris/tests/unit/fileformats/{netcdf/load_cube => nc_load_rules/engine}/test_engine.py (90%) rename lib/iris/tests/unit/fileformats/{pyke_rules => nc_load_rules/helpers}/__init__.py (67%) rename lib/iris/tests/unit/fileformats/{pyke_rules/compiled_krb/fc_rules_cf_fc => nc_load_rules/helpers}/test_build_albers_equal_area_coordinate_system.py (79%) rename 
lib/iris/tests/unit/fileformats/{pyke_rules/compiled_krb/fc_rules_cf_fc => nc_load_rules/helpers}/test_build_auxiliary_coordinate.py (68%) rename lib/iris/tests/unit/fileformats/{pyke_rules/compiled_krb/fc_rules_cf_fc => nc_load_rules/helpers}/test_build_cube_metadata.py (67%) rename lib/iris/tests/unit/fileformats/{pyke_rules/compiled_krb/fc_rules_cf_fc => nc_load_rules/helpers}/test_build_dimension_coordinate.py (72%) rename lib/iris/tests/unit/fileformats/{pyke_rules/compiled_krb/fc_rules_cf_fc => nc_load_rules/helpers}/test_build_geostationary_coordinate_system.py (68%) rename lib/iris/tests/unit/fileformats/{pyke_rules/compiled_krb/fc_rules_cf_fc => nc_load_rules/helpers}/test_build_lambert_azimuthal_equal_area_coordinate_system.py (76%) rename lib/iris/tests/unit/fileformats/{pyke_rules/compiled_krb/fc_rules_cf_fc => nc_load_rules/helpers}/test_build_lambert_conformal_coordinate_system.py (78%) rename lib/iris/tests/unit/fileformats/{pyke_rules/compiled_krb/fc_rules_cf_fc => nc_load_rules/helpers}/test_build_mercator_coordinate_system.py (74%) rename lib/iris/tests/unit/fileformats/{pyke_rules/compiled_krb/fc_rules_cf_fc => nc_load_rules/helpers}/test_build_stereographic_coordinate_system.py (74%) rename lib/iris/tests/unit/fileformats/{pyke_rules/compiled_krb/fc_rules_cf_fc => nc_load_rules/helpers}/test_build_transverse_mercator_coordinate_system.py (68%) rename lib/iris/tests/unit/fileformats/{pyke_rules/compiled_krb/fc_rules_cf_fc => nc_load_rules/helpers}/test_build_verticalp_coordinate_system.py (63%) rename lib/iris/tests/unit/fileformats/{pyke_rules/compiled_krb/fc_rules_cf_fc => nc_load_rules/helpers}/test_get_attr_units.py (73%) rename lib/iris/tests/unit/fileformats/{pyke_rules/compiled_krb/fc_rules_cf_fc => nc_load_rules/helpers}/test_get_cf_bounds_var.py (78%) rename lib/iris/tests/unit/fileformats/{pyke_rules/compiled_krb/fc_rules_cf_fc => nc_load_rules/helpers}/test_get_names.py (61%) rename lib/iris/tests/unit/fileformats/{pyke_rules/compiled_krb/fc_rules_cf_fc => nc_load_rules/helpers}/test_has_supported_mercator_parameters.py (82%) rename lib/iris/tests/unit/fileformats/{pyke_rules/compiled_krb/fc_rules_cf_fc => nc_load_rules/helpers}/test_has_supported_stereographic_parameters.py (81%) rename lib/iris/tests/unit/fileformats/{pyke_rules/compiled_krb/fc_rules_cf_fc => nc_load_rules/helpers}/test_reorder_bounds_data.py (60%) delete mode 100644 lib/iris/tests/unit/fileformats/netcdf/load_cube/__init__.py delete mode 100644 lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__additional.py rename lib/iris/tests/unit/fileformats/netcdf/{load_cube => }/test__load_cube.py (100%) delete mode 100644 lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/__init__.py delete mode 100644 requirements/ci/nox.lock/py37-linux-64.lock delete mode 100644 requirements/ci/nox.lock/py38-linux-64.lock diff --git a/.gitignore b/.gitignore index 4a589524d2..42b4e94c2c 100644 --- a/.gitignore +++ b/.gitignore @@ -35,7 +35,6 @@ pip-cache # Created by Iris build *.so lib/iris/etc/site.cfg -lib/iris/fileformats/_pyke_rules/compiled_krb/ lib/iris/std_names.py # Iris test result files diff --git a/MANIFEST.in b/MANIFEST.in index 1902f6a74f..62f9dc701b 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -3,9 +3,7 @@ include CHANGES COPYING COPYING.LESSER # Files from setup.py package_data that are not automatically added to source distributions recursive-include lib/iris/tests/results *.cml *.cdl *.txt *.xml *.json -recursive-exclude 
lib/iris/fileformats/_pyke_rules/compiled_krb * recursive-include lib/iris/etc * -include lib/iris/fileformats/_pyke_rules/*.krb recursive-include requirements * diff --git a/asv.conf.json b/asv.conf.json index 46cd4839f2..92634b40c5 100644 --- a/asv.conf.json +++ b/asv.conf.json @@ -20,7 +20,6 @@ "scipy": [], "setuptools": [], - "pyke": [], "six": [], "nose": [], diff --git a/docs/src/developers_guide/release.rst b/docs/src/developers_guide/release.rst index 0f2340e077..896cc582f1 100644 --- a/docs/src/developers_guide/release.rst +++ b/docs/src/developers_guide/release.rst @@ -104,7 +104,7 @@ To do this perform the following steps. Create a conda environment with the appropriate conda packages to build the source distribution (``sdist``) and pure Python wheel (``bdist_wheel``):: - > conda create -n iris-pypi -c conda-forge --yes pip pyke python setuptools twine wheel + > conda create -n iris-pypi -c conda-forge --yes pip python setuptools twine wheel > . activate iris-pypi Checkout the appropriate Iris ```` tag from the appropriate ````. diff --git a/docs/src/installing.rst b/docs/src/installing.rst index b55bfa967f..1bea781e5a 100644 --- a/docs/src/installing.rst +++ b/docs/src/installing.rst @@ -70,7 +70,7 @@ The rest can be done with pip. Begin with numpy:: Finally, Iris and its Python dependencies can be installed with the following command:: - pip3 install setuptools cftime==1.2.1 cf-units scitools-pyke scitools-iris + pip3 install setuptools cftime==1.2.1 cf-units scitools-iris This procedure was tested on a Ubuntu 20.04 system on the 27th of January, 2021. diff --git a/lib/iris/fileformats/_nc_load_rules/__init__.py b/lib/iris/fileformats/_nc_load_rules/__init__.py index baea3cf555..4409ace8dc 100644 --- a/lib/iris/fileformats/_nc_load_rules/__init__.py +++ b/lib/iris/fileformats/_nc_load_rules/__init__.py @@ -9,7 +9,7 @@ Interprets CF concepts identified by :mod:`iris.fileformats.cf` to add components into loaded cubes. -For now : the API which mimics :class:`pyke.knowledge_engine.engine`. +For now : the API mimics :class:`pyke.knowledge_engine.engine`. As this is aiming to replace the old Pyke-based logic rules. TODO: simplify once the parallel operation with Pyke is no longer required. diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index 8e4ad7afb6..5813c5bca7 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -33,10 +33,6 @@ textual note that this rule 'failed', aka "did not trigger", which would not be recorded in the original implementation. -The top-level 'run_actions' ensures that the individual rules actions are -called, with various arguments, as appropriate to ensure the whole cube is -built as it was by the original rules implementation. - TODO: remove the use of intermediate "facts" to carry information between actions. This mimics older behaviour, so is still useful while we are still comparing behaviour with the old Pyke rules (debugging). But once that is no diff --git a/lib/iris/fileformats/_nc_load_rules/engine.py b/lib/iris/fileformats/_nc_load_rules/engine.py index 60f956d4d1..ee2cc1bb29 100644 --- a/lib/iris/fileformats/_nc_load_rules/engine.py +++ b/lib/iris/fileformats/_nc_load_rules/engine.py @@ -15,40 +15,47 @@ engine.get_kb() also returns a FactEntity object, which mimics *just enough* API of a Pyke.knowlege_base, so that we can list its case-specific facts, as -used in :meth:`iris.fileformats.netcdf.pyke_stats`. 
+used in :meth:`iris.fileformats.netcdf._actions_activation_stats`. """ from .actions import run_actions -class FactList: - def __init__(self): - self.case_specific_facts = [] - - class FactEntity: - # To support: """ - kb_facts = engine.get_kb(_PYKE_FACT_BASE) + An object with an 'entity_lists' property which is a dict of 'FactList's. - for key in kb_facts.entity_lists.keys(): - for arg in kb_facts.entity_lists[key].case_specific_facts: - print("\t%s%s" % (key, arg)) + A Factlist, in turn, is an object with property 'case_specific_facts', + which is a list of tuples of strings + (each of which is a 'fact' of the named class). + + To support the debug code : + kb_facts = engine.get_kb(_PYKE_FACT_BASE) + for key in kb_facts.entity_lists.keys(): + for arg in kb_facts.entity_lists[key].case_specific_facts: + print("\t%s%s" % (key, arg)) """ def __init__(self): self.entity_lists = {} + class _FactList: + # Just "an object with a 'case_specific_facts' property" (which is a list). + def __init__(self): + self.case_specific_facts = [] + def add_fact(self, fact_name, args): + # Add a fact "fact_name(*args)". if fact_name not in self.entity_lists: - self.entity_lists[fact_name] = FactList() + self.entity_lists[fact_name] = self._FactList() fact_list = self.entity_lists[fact_name] fact_list.case_specific_facts.append(tuple(args)) - def sect_facts(self, entity_name): - if entity_name in self.entity_lists: - facts = self.entity_lists.get(entity_name).case_specific_facts + def sect_facts(self, fact_name): + # Lookup all facts "fact_name(*args)" for a given fact_name. + if fact_name in self.entity_lists: + facts = self.entity_lists.get(fact_name).case_specific_facts else: facts = [] return facts @@ -61,6 +68,11 @@ class Engine: Provides just enough API so that the existing code in :mod:`iris.fileformats.netcdf` can interface with our new rules functions. + A list of possible fact-arglists is store, for each of a set of fact-names + (which are strings). + Each fact-argslist is represented by a tuple of values + -- at present, in practice, those are all strings too. + """ def __init__(self): @@ -71,7 +83,7 @@ def reset(self): """Reset the engine = remove all facts.""" self.facts = FactEntity() - def activate(self, rules_base_str=None): + def activate(self): """ Run all the translation rules to produce a single output cube. @@ -79,42 +91,49 @@ def activate(self, rules_base_str=None): set by engine.cf_var (the variable name). The rules operation itself is coded elsewhere, - in :mod:`iris.fileformats.netcdf._nc_load_rules.rules`. + in :mod:`iris.fileformats.netcdf._nc_load_rules.actions`. """ run_actions(self) - def print_stats(self): - """No-op, called by :meth:`iris.fileformats.netcdf.pyke_stats`.""" - pass + def get_kb(self): + """ + Get a FactEntity, which mimic (bits of) a knowledge-base. + + Just allowing + :meth:`iris.fileformats.netcdf._action_activation_stats` to list the + facts. - def add_case_specific_fact(self, kb_name, fact_name, fact_arglist): """ - Record a fact about the current output operation. + return self.facts - Roughly, self.facts.entity_lists[fact_name].append(fact_arglist). + def print_stats(self): + """ + No-op, called by + :meth:`iris.fileformats.netcdf._action_activation_stats`. """ - self.facts.add_fact(fact_name, fact_arglist) + pass - def get_kb(self, fact_base_str=None): + def add_case_specific_fact(self, fact_name, fact_arglist): """ - Get a FactEntity, which mimic (bits of) a knowledge-base. + Record a fact about the current output operation. 
- Just allowing - :meth:`iris.fileformats.netcdf.pyke_stats` to list the facts. + Roughly, + facts = self.facts.entity_lists[fact_name].case_specific_facts + facts.append(fact_arglist) """ - return self.facts + self.facts.add_fact(fact_name, fact_arglist) def fact_list(self, fact_name): """ Return the facts (arg-lists) for one fact name. - A shorthand form used only by the new rules routines. + A shorthand form used only by the new 'actions' routines. AKA 'case-specific-facts', in the original. - Roughly "return self.facts.entity_lists[fact_name]". + Roughly = "self.facts.entity_lists[fact_name].case_specific_facts". """ return self.facts.sect_facts(fact_name) @@ -123,9 +142,9 @@ def add_fact(self, fact_name, fact_arglist): """ Add a new fact. - A shorthand form used only by the new rules routines. + A shorthand form used only by the new 'actions' routines. """ self.add_case_specific_fact( - kb_name="", fact_name=fact_name, fact_arglist=fact_arglist + fact_name=fact_name, fact_arglist=fact_arglist ) diff --git a/lib/iris/fileformats/_pyke_rules/fc_rules_cf.krb b/lib/iris/fileformats/_pyke_rules/fc_rules_cf.krb deleted file mode 100644 index d41ec6aa3e..0000000000 --- a/lib/iris/fileformats/_pyke_rules/fc_rules_cf.krb +++ /dev/null @@ -1,2355 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. - -# -# Pyke forward chaining rule interface to translate NetCDF Climate Forecast (CF) -# Metadata Conventions data into an Iris cube. -# -# References: -# -# [CF] NetCDF Climate and Forecast (CF) Metadata conventions, Version 1.5, October, 2010. -# - - -# -# Context: -# This rule will always trigger. -# -# Purpose: -# Add standard meta-data to the cube. -# -fc_default - assert - python build_cube_metadata(engine) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a grid_mapping() case specific fact -# has been asserted that refers to a rotated pole. -# -# Purpose: -# Creates the rotated pole lat/lon coordinate system. -# -fc_provides_grid_mapping_rotated_latitude_longitude - foreach - facts_cf.grid_mapping($grid_mapping) - check is_grid_mapping(engine, $grid_mapping, CF_GRID_MAPPING_ROTATED_LAT_LON) - assert - python cf_grid_var = engine.cf_var.cf_group.grid_mappings[$grid_mapping] - python coordinate_system = build_rotated_coordinate_system(engine, cf_grid_var) - python engine.cube_parts['coordinate_system'] = coordinate_system - facts_cf.provides(coordinate_system, rotated_latitude_longitude) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a grid_mapping() case specific fact -# has been asserted that refers to a regular lat/lon. -# -# Purpose: -# Creates the lat/lon coordinate system. -# -fc_provides_grid_mapping_latitude_longitude - foreach - facts_cf.grid_mapping($grid_mapping) - check is_grid_mapping(engine, $grid_mapping, CF_GRID_MAPPING_LAT_LON) - assert - python cf_grid_var = engine.cf_var.cf_group.grid_mappings[$grid_mapping] - python coordinate_system = build_coordinate_system(cf_grid_var) - python engine.cube_parts['coordinate_system'] = coordinate_system - facts_cf.provides(coordinate_system, latitude_longitude) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger iff a grid_mapping() case specific fact -# has been asserted that refers to a transverse Mercator. 
-# -# Purpose: -# Creates the transverse Mercator coordinate system. -# -fc_provides_grid_mapping_transverse_mercator - foreach - facts_cf.grid_mapping($grid_mapping) - check is_grid_mapping(engine, $grid_mapping, CF_GRID_MAPPING_TRANSVERSE) - assert - python cf_grid_var = engine.cf_var.cf_group.grid_mappings[$grid_mapping] - python coordinate_system = build_transverse_mercator_coordinate_system(engine, cf_grid_var) - python engine.cube_parts['coordinate_system'] = coordinate_system - facts_cf.provides(coordinate_system, transverse_mercator) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a grid_mapping() case specific fact -# has been asserted that refers to a Mercator. -# -# Purpose: -# Creates the Mercator coordinate system. -# -fc_provides_grid_mapping_mercator - foreach - facts_cf.grid_mapping($grid_mapping) - check is_grid_mapping(engine, $grid_mapping, CF_GRID_MAPPING_MERCATOR) - check has_supported_mercator_parameters(engine, $grid_mapping) - assert - python cf_grid_var = engine.cf_var.cf_group.grid_mappings[$grid_mapping] - python coordinate_system = build_mercator_coordinate_system(engine, cf_grid_var) - python engine.cube_parts['coordinate_system'] = coordinate_system - facts_cf.provides(coordinate_system, mercator) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a grid_mapping() case specific fact -# has been asserted that refers to a stereographic. -# -# Purpose: -# Creates the stereographic coordinate system. -# -fc_provides_grid_mapping_stereographic - foreach - facts_cf.grid_mapping($grid_mapping) - check is_grid_mapping(engine, $grid_mapping, CF_GRID_MAPPING_STEREO) - check has_supported_stereographic_parameters(engine, $grid_mapping) - assert - python cf_grid_var = engine.cf_var.cf_group.grid_mappings[$grid_mapping] - python coordinate_system = build_stereographic_coordinate_system(engine, cf_grid_var) - python engine.cube_parts['coordinate_system'] = coordinate_system - facts_cf.provides(coordinate_system, stereographic) - python engine.rule_triggered.add(rule.name) - - -# -# Context: This rule will trigger iff a grid_mapping() case specific fact -# has been asserted that refers to a Lambert conformal. -# -# Purpose: -# Creates the Lambert conformal conic coordinate system. -# -fc_provides_grid_mapping_lambert_conformal - foreach - facts_cf.grid_mapping($grid_mapping) - check is_grid_mapping(engine, $grid_mapping, CF_GRID_MAPPING_LAMBERT_CONFORMAL) - assert - python cf_grid_var = engine.cf_var.cf_group.grid_mappings[$grid_mapping] - python coordinate_system = build_lambert_conformal_coordinate_system(engine, cf_grid_var) - python engine.cube_parts['coordinate_system'] = coordinate_system - facts_cf.provides(coordinate_system, lambert_conformal) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger iff a grid_mapping() case specific fact -# has been asserted that refers to a lambert azimuthal equal area. -# -# Purpose: -# Creates the lambert azimuthal equal area coordinate system. 
-# -fc_provides_grid_mapping_lambert_azimuthal_equal_area - foreach - facts_cf.grid_mapping($grid_mapping) - check is_grid_mapping(engine, $grid_mapping, CF_GRID_MAPPING_LAMBERT_AZIMUTHAL) - assert - python cf_grid_var = engine.cf_var.cf_group.grid_mappings[$grid_mapping] - python coordinate_system = build_lambert_azimuthal_equal_area_coordinate_system(engine, cf_grid_var) - python engine.cube_parts['coordinate_system'] = coordinate_system - facts_cf.provides(coordinate_system, lambert_azimuthal_equal_area) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger iff a grid_mapping() case specific fact -# has been asserted that refers to a albers conical equal area. -# -# Purpose: -# Creates the albers conical equal area coordinate system. -# -fc_provides_grid_mapping_albers_equal_area - foreach - facts_cf.grid_mapping($grid_mapping) - check is_grid_mapping(engine, $grid_mapping, CF_GRID_MAPPING_ALBERS) - assert - python cf_grid_var = engine.cf_var.cf_group.grid_mappings[$grid_mapping] - python coordinate_system = build_albers_equal_area_coordinate_system(engine, cf_grid_var) - python engine.cube_parts['coordinate_system'] = coordinate_system - facts_cf.provides(coordinate_system, albers_equal_area) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger iff a grid_mapping() case specific fact -# has been asserted that refers to a vertical perspective. -# -# Purpose: -# Creates the vertical perspective coordinate system. -# -fc_provides_grid_mapping_vertical_perspective - foreach - facts_cf.grid_mapping($grid_mapping) - check is_grid_mapping(engine, $grid_mapping, CF_GRID_MAPPING_VERTICAL) - assert - python cf_grid_var = engine.cf_var.cf_group.grid_mappings[$grid_mapping] - python coordinate_system = \ - build_vertical_perspective_coordinate_system(engine, cf_grid_var) - python engine.cube_parts['coordinate_system'] = coordinate_system - facts_cf.provides(coordinate_system, vertical_perspective) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger iff a grid_mapping() case specific fact -# has been asserted that refers to a geostationary. -# -# Purpose: -# Creates the geostationary coordinate system. -# -fc_provides_grid_mapping_geostationary - foreach - facts_cf.grid_mapping($grid_mapping) - check is_grid_mapping(engine, $grid_mapping, - CF_GRID_MAPPING_GEOSTATIONARY) - assert - python cf_grid_var = engine.cf_var.cf_group.grid_mappings[$grid_mapping] - python coordinate_system = \ - build_geostationary_coordinate_system(engine, cf_grid_var) - python engine.cube_parts['coordinate_system'] = coordinate_system - facts_cf.provides(coordinate_system, geostationary) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a coordinate() case specific fact -# has been asserted that refers to a CF latitude coordinate. -# -# Purpose: -# Assert that the CF latitude coordinate exists. -# -fc_provides_coordinate_latitude - foreach - facts_cf.coordinate($coordinate) - check is_latitude(engine, $coordinate) - assert - facts_cf.provides(coordinate, latitude, $coordinate) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a coordinate() case specific fact -# has been asserted that refers to a CF longitude coordinate. -# -# Purpose: -# Assert that the CF longitude coordinate exists. 
-# -fc_provides_coordinate_longitude - foreach - facts_cf.coordinate($coordinate) - check is_longitude(engine, $coordinate) - assert - facts_cf.provides(coordinate, longitude, $coordinate) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a coordinate() case specific fact -# has been asserted that refers to a CF projection_x_coordinate. -# -# Purpose: -# Assert that the CF projection_x_coordinate exists. -# -fc_provides_projection_x_coordinate - foreach - facts_cf.coordinate($coordinate) - check is_projection_x_coordinate(engine, $coordinate) - assert - facts_cf.provides(coordinate, projection_x_coordinate, $coordinate) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a coordinate() case specific fact -# has been asserted that refers to a CF projection_y_coordinate. -# -# Purpose: -# Assert that the CF projection_y_coordinate exists. -# -fc_provides_projection_y_coordinate - foreach - facts_cf.coordinate($coordinate) - check is_projection_y_coordinate(engine, $coordinate) - assert - facts_cf.provides(coordinate, projection_y_coordinate, $coordinate) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a coordinate() case specific fact -# has been asserted that refers to a CF time coordinate. -# -# Purpose: -# Assert that the CF time coordinate exists. -# -fc_provides_coordinate_time - foreach - facts_cf.coordinate($coordinate) - check is_time(engine, $coordinate) - assert - facts_cf.provides(coordinate, time, $coordinate) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a coordinate() case specific fact -# had been asserted that contains units of time, but is not -# a time reference. -# -# Purpose: -# Assert that the forecast period coordinate exists. -# -fc_provides_coordinate_time_period - foreach - facts_cf.coordinate($coordinate) - check is_time_period(engine, $coordinate) - assert - facts_cf.provides(coordinate, time_period, $coordinate) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger for each label() case specific fact. -# -# Purpose: -# Add the label coordinate to the cube. -# -fc_build_label_coordinate - foreach - facts_cf.label($coordinate) - assert - python cf_coord_var = engine.cf_var.cf_group.labels[$coordinate] - python build_auxiliary_coordinate(engine, cf_coord_var) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff an auxiliary_coordinate() case specific fact -# has been asserted that contains units that are a time reference or an -# axis of time. -# -# Purpose: -# Add the time reference auxiliary coordinate to the cube. -# -fc_build_auxiliary_coordinate_time - foreach - facts_cf.auxiliary_coordinate($coordinate) - check is_time(engine, $coordinate) - assert - python cf_coord_var = engine.cf_var.cf_group.auxiliary_coordinates[$coordinate] - python build_auxiliary_coordinate(engine, cf_coord_var) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff an auxiliary_coordinate() case specific fact -# has been asserted that contains units of time, but is not a time reference. -# -# Purpose: -# Add the time period auxiliary coordinate to the cube. 
-# -fc_build_auxiliary_coordinate_time_period - foreach - facts_cf.auxiliary_coordinate($coordinate) - check is_time_period(engine, $coordinate) - assert - python cf_coord_var = engine.cf_var.cf_group.auxiliary_coordinates[$coordinate] - python build_auxiliary_coordinate(engine, cf_coord_var) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff an auxiliary_coordinate() case specific fact -# has been asserted that refers to non-rotated pole latitude data. -# -# Purpose: -# Add the latitude auxiliary coordinate to the cube. -# -fc_build_auxiliary_coordinate_latitude - foreach - facts_cf.auxiliary_coordinate($coordinate) - check is_latitude(engine, $coordinate) - check not is_rotated_latitude(engine, $coordinate) - assert - python cf_coord_var = engine.cf_var.cf_group.auxiliary_coordinates[$coordinate] - python build_auxiliary_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_LAT) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff an auxiliary_coordiante() case specific fact -# has been asserted that refers to rotated pole latitude data. -# -# Purpose: -# Add the rotated pole latitude auxiliary coordinate to the cube. -# -fc_build_auxiliary_coordinate_latitude_rotated - foreach - facts_cf.auxiliary_coordinate($coordinate) - check is_latitude(engine, $coordinate) - check is_rotated_latitude(engine, $coordinate) - assert - python cf_coord_var = engine.cf_var.cf_group.auxiliary_coordinates[$coordinate] - python build_auxiliary_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_GRID_LAT) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff an auxiliary_coordinate() case specific fact -# has been asserted that refers to non-rotated pole longitude data. -# -# Purpose: -# Add the longitude auxiliary coordinate to the cube. -# -fc_build_auxiliary_coordinate_longitude - foreach - facts_cf.auxiliary_coordinate($coordinate) - check is_longitude(engine, $coordinate) - check not is_rotated_longitude(engine, $coordinate) - assert - python cf_coord_var = engine.cf_var.cf_group.auxiliary_coordinates[$coordinate] - python build_auxiliary_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_LON) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff an auxiliary_coordinate() case specific fact -# has been asserted that refers to rotated pole longitude data. -# -# Purpose: -# Add the rotated pole auxiliary coordinate to the cube. -# -fc_build_auxiliary_coordinate_longitude_rotated - foreach - facts_cf.auxiliary_coordinate($coordinate) - check is_longitude(engine, $coordinate) - check is_rotated_longitude(engine, $coordinate) - assert - python cf_coord_var = engine.cf_var.cf_group.auxiliary_coordinates[$coordinate] - python build_auxiliary_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_GRID_LON) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger for each auxiliary_coordinate() case specific fact -# that is not a spatio-temporal related auxiliary coordinate. -# -# Purpose: -# Add the auxiliary coordinate to the cube. 
-# -fc_build_auxiliary_coordinate - foreach - facts_cf.auxiliary_coordinate($coordinate) - check not is_time(engine, $coordinate) - check not is_time_period(engine, $coordinate) - check not is_latitude(engine, $coordinate) - check not is_longitude(engine, $coordinate) - assert - python cf_coord_var = engine.cf_var.cf_group.auxiliary_coordinates[$coordinate] - python build_auxiliary_coordinate(engine, cf_coord_var) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger for each cell_measure case specific fact. -# -# Purpose: -# Add the cell measures attribute to the cube. -# -fc_build_cell_measure - foreach - facts_cf.cell_measure($coordinate) - assert - python cf_coord_var = engine.cf_var.cf_group.cell_measures[$coordinate] - python build_cell_measures(engine, cf_coord_var) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger for each ancillary_variable case specific fact. -# -# Purpose: -# Add the ancillary variable to the cube. -# -fc_build_ancil_var - foreach - facts_cf.ancillary_variable($var) - assert - python ancil_var = engine.cf_var.cf_group.ancillary_variables[$var] - python build_ancil_var(engine, ancil_var) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a CF latitude coordinate exists and -# a lat/lon coordinate system exists. -# -# Purpose: -# Add the latitude coordinate into the cube. -# -fc_build_coordinate_latitude - foreach - facts_cf.provides(coordinate, latitude, $coordinate) - facts_cf.provides(coordinate_system, latitude_longitude) - check not is_rotated_latitude(engine, $coordinate) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_LAT, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a CF rotated latitude coordinate exists and -# a rotated lat/lon coordinate system exists. -# -# Purpose: -# Add the rotated latitude coordinate into the cube. -# -fc_build_coordinate_latitude_rotated - foreach - facts_cf.provides(coordinate, latitude, $coordinate) - facts_cf.provides(coordinate_system, rotated_latitude_longitude) - check is_rotated_latitude(engine, $coordinate) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_GRID_LAT, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a CF longitude coordinate exists and -# a lat/lon coordinate system exists. -# -# Purpose: -# Add the longitude coordinate into the cube. -# -fc_build_coordinate_longitude - foreach - facts_cf.provides(coordinate, longitude, $coordinate) - facts_cf.provides(coordinate_system, latitude_longitude) - check not is_rotated_longitude(engine, $coordinate) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_LON, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a CF rotated longitude coordinate exists and -# a rotated lat/lon coordinate system exists. -# -# Purpose: -# Add the rotated longitude coordinate into the cube. 
-# -fc_build_coordinate_longitude_rotated - foreach - facts_cf.provides(coordinate, longitude, $coordinate) - facts_cf.provides(coordinate_system, rotated_latitude_longitude) - check is_rotated_longitude(engine, $coordinate) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_GRID_LON, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a CF latitude coordinate exists and -# no coordinate system exists. -# -# Purpose: -# Add the latitude coordinate into the cube. -# -fc_build_coordinate_latitude_nocs - foreach - facts_cf.provides(coordinate, latitude, $coordinate) - notany - facts_cf.provides(coordinate_system, latitude_longitude) - notany - facts_cf.provides(coordinate_system, rotated_latitude_longitude) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_LAT, - coord_system=None) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a CF longitude coordinate exists and -# no lat/lon coordinate system exists. -# -# Purpose: -# Add the longitude coordinate into the cube. -# -fc_build_coordinate_longitude_nocs - foreach - facts_cf.provides(coordinate, longitude, $coordinate) - notany - facts_cf.provides(coordinate_system, latitude_longitude) - notany - facts_cf.provides(coordinate_system, rotated_latitude_longitude) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_LON, - coord_system=None) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a projection_x_coordinate coordinate exists and -# a transverse Mercator coordinate system exists. -# -# Purpose: -# Add the projection_x_coordinate coordinate into the cube. -# -fc_build_coordinate_projection_x_transverse_mercator - foreach - facts_cf.provides(coordinate, projection_x_coordinate, $coordinate) - facts_cf.provides(coordinate_system, transverse_mercator) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_PROJ_X, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a projection_y_coordinate coordinate exists and -# a transverse Mercator coordinate system exists. -# -# Purpose: -# Add the projection_y_coordinate coordinate into the cube. -# -fc_build_coordinate_projection_y_transverse_mercator - foreach - facts_cf.provides(coordinate, projection_y_coordinate, $coordinate) - facts_cf.provides(coordinate_system, transverse_mercator) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_PROJ_Y, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger iff a projection_x_coordinate coordinate exists and -# a Lambert conformal coordinate system exists. -# -# Purpose: -# Add the projection_x_coordinate coordinate into the cube. 
-# -fc_build_coordinate_projection_x_lambert_conformal - foreach - facts_cf.provides(coordinate, projection_x_coordinate, $coordinate) - facts_cf.provides(coordinate_system, lambert_conformal) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_PROJ_X, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a projection_y_coordinate coordinate exists and -# a Lambert conformal coordinate system exists. -# -# Purpose: -# Add the projection_y_coordinate coordinate into the cube. -# -fc_build_coordinate_projection_y_lambert_conformal - foreach - facts_cf.provides(coordinate, projection_y_coordinate, $coordinate) - facts_cf.provides(coordinate_system, lambert_conformal) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_PROJ_Y, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a projection_x_coordinate coordinate exists and -# a Mercator coordinate system exists. -# -# Purpose: -# Add the projection_x_coordinate coordinate into the cube. -# -fc_build_coordinate_projection_x_mercator - foreach - facts_cf.provides(coordinate, projection_x_coordinate, $coordinate) - facts_cf.provides(coordinate_system, mercator) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_PROJ_X, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger iff a projection_y_coordinate coordinate exists and -# a Mercator coordinate system exists. -# -# Purpose: -# Add the projection_y_coordinate coordinate into the cube. -# -fc_build_coordinate_projection_y_mercator - foreach - facts_cf.provides(coordinate, projection_y_coordinate, $coordinate) - facts_cf.provides(coordinate_system, mercator) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_PROJ_Y, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger iff a projection_x_coordinate coordinate exists and -# a sterographic coordinate system exists. -# -# Purpose: -# Add the projection_x_coordinate coordinate into the cube. -# -fc_build_coordinate_projection_x_stereographic - foreach - facts_cf.provides(coordinate, projection_x_coordinate, $coordinate) - facts_cf.provides(coordinate_system, stereographic) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_PROJ_X, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger iff a projection_y_coordinate coordinate exists and -# a stereographic coordinate system exists. -# -# Purpose: -# Add the projection_y_coordinate coordinate into the cube. 
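# ---------------------------------------------------------------------------
# Illustrative sketch: the projection_x/_y rules for transverse_mercator,
# lambert_conformal, mercator, stereographic, etc. are identical apart from
# which coordinate-system fact they require, so a pure-Python translation can
# be table-driven instead of one rule per projection.  The table and fact
# names are illustrative assumptions, not code from this patch.

_PROJECTION_GRID_MAPPINGS = (
    "transverse_mercator",
    "lambert_conformal",
    "mercator",
    "stereographic",
    "lambert_azimuthal_equal_area",
    "albers_equal_area",
    "vertical_perspective",
    "geostationary",
)

def action_build_projection_coordinates(engine):
    for grid_mapping in _PROJECTION_GRID_MAPPINGS:
        if not engine.fact_list("provides_cs_" + grid_mapping):
            continue
        for axis, std_name in (("x", CF_VALUE_STD_NAME_PROJ_X),
                               ("y", CF_VALUE_STD_NAME_PROJ_Y)):
            for (cf_name,) in engine.fact_list("provides_" + std_name):
                cf_coord_var = engine.cf_var.cf_group.coordinates[cf_name]
                build_dimension_coordinate(
                    engine, cf_coord_var, coord_name=std_name,
                    coord_system=engine.cube_parts["coordinate_system"])
                engine.rule_triggered.add(
                    "fc_build_coordinate_projection_%s_%s"
                    % (axis, grid_mapping))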
-# -fc_build_coordinate_projection_y_stereographic - foreach - facts_cf.provides(coordinate, projection_y_coordinate, $coordinate) - facts_cf.provides(coordinate_system, stereographic) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_PROJ_Y, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a projection_x_coordinate coordinate exists and -# a lambert azimuthal equal area coordinate system exists. -# -# Purpose: -# Add the projection_x_coordinate coordinate into the cube. -# -fc_build_coordinate_projection_x_lambert_azimuthal_equal_area - foreach - facts_cf.provides(coordinate, projection_x_coordinate, $coordinate) - facts_cf.provides(coordinate_system, lambert_azimuthal_equal_area) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_PROJ_X, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a projection_y_coordinate coordinate exists and -# a lambert azimuthal equal area coordinate system exists. -# -# Purpose: -# Add the projection_y_coordinate coordinate into the cube. -# -fc_build_coordinate_projection_y_lambert_azimuthal_equal_area - foreach - facts_cf.provides(coordinate, projection_y_coordinate, $coordinate) - facts_cf.provides(coordinate_system, lambert_azimuthal_equal_area) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_PROJ_Y, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger iff a projection_x_coordinate coordinate exists and -# a albers conical equal area coordinate system exists. -# -# Purpose: -# Add the projection_x_coordinate coordinate into the cube. -# -fc_build_coordinate_projection_x_albers_equal_area - foreach - facts_cf.provides(coordinate, projection_x_coordinate, $coordinate) - facts_cf.provides(coordinate_system, albers_equal_area) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_PROJ_X, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a projection_y_coordinate coordinate exists and -# a albers conical equal area coordinate system exists. -# -# Purpose: -# Add the projection_y_coordinate coordinate into the cube. -# -fc_build_coordinate_projection_y_albers_equal_area - foreach - facts_cf.provides(coordinate, projection_y_coordinate, $coordinate) - facts_cf.provides(coordinate_system, albers_equal_area) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_PROJ_Y, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger iff a projection_x_coordinate coordinate exists and -# a vertical perspective coordinate system exists. -# -# Purpose: -# Add the projection_x_coordinate coordinate into the cube. 
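# ---------------------------------------------------------------------------
# Illustrative sketch: every rule above reads the coordinate system from
# engine.cube_parts['coordinate_system'].  That entry is filled in by the
# grid-mapping rules, which are not part of this hunk; roughly, such a rule
# would look like the following.  The fact names, and the assumption that the
# engine exposes add_fact(), are illustrative only.

def action_provides_grid_mapping_latlon(engine):
    for (cf_name,) in engine.fact_list("grid_mapping"):
        if is_grid_mapping(engine, cf_name, CF_GRID_MAPPING_LAT_LON):
            cf_grid_var = engine.cf_var.cf_group.grid_mappings[cf_name]
            engine.cube_parts["coordinate_system"] = build_coordinate_system(
                cf_grid_var)
            # Record the fact that the later coordinate rules test for.
            engine.add_fact("provides_cs_latitude_longitude", (cf_name,))
            engine.rule_triggered.add(
                "fc_provides_grid_mapping_latitude_longitude")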
-# -fc_build_coordinate_projection_x_vertical_perspective - foreach - facts_cf.provides(coordinate, projection_x_coordinate, $coordinate) - facts_cf.provides(coordinate_system, vertical_perspective) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_PROJ_X, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a projection_y_coordinate coordinate exists and -# a vertical perspective coordinate system exists. -# -# Purpose: -# Add the projection_y_coordinate coordinate into the cube. -# -fc_build_coordinate_projection_y_vertical_perspective - foreach - facts_cf.provides(coordinate, projection_y_coordinate, $coordinate) - facts_cf.provides(coordinate_system, vertical_perspective) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_PROJ_Y, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger iff a projection_x_coordinate coordinate exists and -# a geostationary coordinate system exists. -# -# Purpose: -# Add the projection_x_coordinate coordinate into the cube. -# -fc_build_coordinate_projection_x_geostationary - foreach - facts_cf.provides(coordinate, projection_x_coordinate, $coordinate) - facts_cf.provides(coordinate_system, geostationary) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_PROJ_X, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a projection_y_coordinate coordinate exists and -# a geostationary coordinate system exists. -# -# Purpose: -# Add the projection_y_coordinate coordinate into the cube. -# -fc_build_coordinate_projection_y_geostationary - foreach - facts_cf.provides(coordinate, projection_y_coordinate, $coordinate) - facts_cf.provides(coordinate_system, geostationary) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var, - coord_name=CF_VALUE_STD_NAME_PROJ_Y, - coord_system=engine.cube_parts['coordinate_system']) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a CF time coordinate exists. -# -# Purpose: -# Add the time coordinate into the cube. -# -fc_build_coordinate_time - foreach - facts_cf.provides(coordinate, time, $coordinate) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a time period coordinate exists. -# -# Purpose: -# Add the time period coordinate to the cube. -# -fc_build_coordinate_time_period - foreach - facts_cf.provides(coordinate, time_period, $coordinate) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff there exists an "unclassifed" coordinate. -# i.e. 
a coordinate that is not a latitude, longitude, time or vertical coordinate. -# -# Purpose: -# Add the miscellaneous coordinate into the cube. -# -fc_default_coordinate - foreach - facts_cf.coordinate($coordinate) - notany - facts_cf.provides(coordinate, $_, $coordinate) - assert - python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] - python build_dimension_coordinate(engine, cf_coord_var) - facts_cf.provides(coordinate, miscellaneous, $coordinate) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff the "um_stash_source" or "ukmo__um_stash_source" attributes exist -# on the CF-netCDF data variable. -# -# Purpose: -# Add the CF-netCDF data variable "um_stash_source" attribute to the -# cube attributes dictionary as a "STASH" key. -# -fc_attribute_ukmo__um_stash_source - foreach - check hasattr(engine.cf_var, 'ukmo__um_stash_source') or hasattr(engine.cf_var, 'um_stash_source') - assert - python attr_value = getattr(engine.cf_var, 'um_stash_source', None) or getattr(engine.cf_var, 'ukmo__um_stash_source') - python engine.cube.attributes['STASH'] = pp.STASH.from_msi(attr_value) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger iff the "ukmo__process_flags" attribute exists -# on the CF-netCDF data variable. -# -# Purpose: -# Add the CF-netCDF data variable "ukmo__process_flags" attribute to the -# cube attributes dictionary as a "ukmo__process_flags" key. -# -fc_attribute_ukmo__process_flags - foreach - check hasattr(engine.cf_var, 'ukmo__process_flags') - assert - python attr_value = engine.cf_var.ukmo__process_flags - python engine.cube.attributes['ukmo__process_flags'] = tuple([x.replace("_", " ") for x in attr_value.split(" ")]) - python engine.rule_triggered.add(rule.name) - - -# -# Context: -# This rule will trigger iff a formula term that refers to a -# dimensionless vertical coordinate of hybrid height. -# -# Purpose: -# Assert that the formula term refers to hybrid height. -# -fc_formula_type_atmosphere_hybrid_height_coordinate - foreach - facts_cf.formula_root($coordinate) - check getattr(engine.cf_var.cf_group[$coordinate], 'standard_name') == 'atmosphere_hybrid_height_coordinate' - assert - python engine.requires['formula_type'] = 'atmosphere_hybrid_height_coordinate' - facts_cf.formula_type(atmosphere_hybrid_height_coordinate) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger iff a formula term that refers to a -# dimensionless vertical coordinate of hybrid pressure. -# -# Purpose: -# Assert that the formula term refers to hybrid pressure. -# -fc_formula_type_atmosphere_hybrid_sigma_pressure_coordinate - foreach - facts_cf.formula_root($coordinate) - check getattr(engine.cf_var.cf_group[$coordinate], 'standard_name') == 'atmosphere_hybrid_sigma_pressure_coordinate' - assert - python engine.requires['formula_type'] = 'atmosphere_hybrid_sigma_pressure_coordinate' - facts_cf.formula_type(atmosphere_hybrid_height_coordinate) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger iff a formula term that refers to a -# dimensionless vertical coordinate of ocean sigma over z -# -# Purpose: -# Assert that the formula term refers to ocean sigma over z. 
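# ---------------------------------------------------------------------------
# Illustrative sketch: each fc_formula_type_* rule above (and below) keys
# purely off the standard_name of the formula root variable, so they collapse
# naturally into one data-driven action.  Note that the hybrid sigma-pressure
# rule above asserts facts_cf.formula_type(atmosphere_hybrid_height_coordinate),
# which looks like a copy/paste slip; a table-driven form avoids that class
# of error.  Fact and attribute names below are assumptions for illustration.

_FORMULA_TYPES = (
    "atmosphere_hybrid_height_coordinate",
    "atmosphere_hybrid_sigma_pressure_coordinate",
    "ocean_sigma_z_coordinate",
    "ocean_sigma_coordinate",
    "ocean_s_coordinate",
    "ocean_s_coordinate_g1",
    "ocean_s_coordinate_g2",
)

def action_formula_type(engine):
    for (var_name,) in engine.fact_list("formula_root"):
        std_name = getattr(
            engine.cf_var.cf_group[var_name], "standard_name", None)
        if std_name in _FORMULA_TYPES:
            engine.requires["formula_type"] = std_name
            engine.add_fact("formula_type", (std_name,))
            engine.rule_triggered.add("fc_formula_type_" + std_name)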
-# -fc_formula_type_ocean_sigma_z_coordinate - foreach - facts_cf.formula_root($coordinate) - check getattr(engine.cf_var.cf_group[$coordinate], 'standard_name') == 'ocean_sigma_z_coordinate' - assert - python engine.requires['formula_type'] = 'ocean_sigma_z_coordinate' - facts_cf.formula_type(ocean_sigma_z_coordinate) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger if a formula term that refers to a -# dimensionless vertical coordinate of ocean sigma -# -# Purpose: -# Assert that the formula term refers to ocean sigma -# -fc_formula_type_ocean_sigma_coordinate - foreach - facts_cf.formula_root($coordinate) - check getattr(engine.cf_var.cf_group[$coordinate], 'standard_name') == 'ocean_sigma_coordinate' - assert - python engine.requires['formula_type'] = 'ocean_sigma_coordinate' - facts_cf.formula_type(ocean_sigma_coordinate) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger if a formula term that refers to a -# dimensionless vertical coordinate of ocean s coordinate -# -# Purpose: -# Assert that the formula term refers to ocean sigma -# -fc_formula_type_ocean_s_coordinate - foreach - facts_cf.formula_root($coordinate) - check getattr(engine.cf_var.cf_group[$coordinate], 'standard_name') == 'ocean_s_coordinate' - assert - python engine.requires['formula_type'] = 'ocean_s_coordinate' - facts_cf.formula_type(ocean_s_coordinate) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger if a formula term that refers to a -# dimensionless vertical coordinate of Ocean s-coordinate g1 -# -# Purpose: -# Assert that the formula term refers to Ocean s-coordinate g1 -# -fc_formula_type_ocean_s_coordinate_g1 - foreach - facts_cf.formula_root($coordinate) - check getattr(engine.cf_var.cf_group[$coordinate], 'standard_name') == 'ocean_s_coordinate_g1' - assert - python engine.requires['formula_type'] = 'ocean_s_coordinate_g1' - facts_cf.formula_type(ocean_s_coordinate_g1) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger if a formula term that refers to a -# dimensionless vertical coordinate of Ocean s-coordinate g2 -# -# Purpose: -# Assert that the formula term refers to Ocean s-coordinate g2 -# -fc_formula_type_ocean_s_coordinate_g2 - foreach - facts_cf.formula_root($coordinate) - check getattr(engine.cf_var.cf_group[$coordinate], 'standard_name') == 'ocean_s_coordinate_g2' - assert - python engine.requires['formula_type'] = 'ocean_s_coordinate_g2' - facts_cf.formula_type(ocean_s_coordinate_g2) - python engine.rule_triggered.add(rule.name) - -# -# Context: -# This rule will trigger for variables referenced by a dimensionless -# vertical coordinate. -# -# Purpose: -# Build a mapping from term name to netCDF variable name. 
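# ---------------------------------------------------------------------------
# Illustrative sketch: the fc_formula_terms rule that follows records a
# {term -> netCDF variable name} mapping in engine.requires['formula_terms'].
# Together with requires['formula_type'] this is what the loader later uses
# to build the matching AuxCoordFactory (e.g. a hybrid-height factory for
# atmosphere_hybrid_height_coordinate).  The simplified action below ignores
# the per-root matching of the real rule and uses assumed fact names.

def action_formula_terms(engine):
    for var_name, _root, term in engine.fact_list("formula_term"):
        engine.requires.setdefault("formula_terms", {})[term] = var_name
        engine.rule_triggered.add("fc_formula_terms")

# After activation of a hybrid-height data variable one would then expect
# something like:
#   engine.requires['formula_type']  == 'atmosphere_hybrid_height_coordinate'
#   engine.requires['formula_terms'] == {'a': ..., 'b': ..., 'orog': ...}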
-# -fc_formula_terms - foreach - facts_cf.formula_root($coordinate) - facts_cf.formula_term($var_name, $coordinate, $term) - assert - python engine.requires.setdefault('formula_terms', {})[$term] = $var_name - python engine.rule_triggered.add(rule.name) - - -fc_extras - import warnings - - import cf_units - import netCDF4 - import numpy as np - import numpy.ma as ma - - import iris.aux_factory - from iris.common.mixin import _get_valid_standard_name - import iris.coords - import iris.coord_systems - import iris.fileformats.cf as cf - import iris.fileformats.netcdf - from iris.fileformats.netcdf import _get_cf_var_data, parse_cell_methods, UnknownCellMethodWarning - import iris.fileformats.pp as pp - import iris.exceptions - import iris.std_names - import iris.util - from iris._lazy_data import as_lazy_data - - - # - # UD Units Constants (based on Unidata udunits.dat definition file) - # - UD_UNITS_LAT = ['degrees_north', 'degree_north', 'degree_n', 'degrees_n', - 'degreen', 'degreesn', 'degrees', 'degrees north', - 'degree north', 'degree n', 'degrees n'] - UD_UNITS_LON = ['degrees_east', 'degree_east', 'degree_e', 'degrees_e', - 'degreee', 'degreese', 'degrees', 'degrees east', - 'degree east', 'degree e', 'degrees e'] - UNKNOWN_UNIT_STRING = "?" - NO_UNIT_STRING = "-" - - # - # CF Dimensionless Vertical Coordinates - # - CF_COORD_VERTICAL = {'atmosphere_ln_pressure_coordinate':['p0', 'lev'], - 'atmosphere_sigma_coordinate':['sigma', 'ps', 'ptop'], - 'atmosphere_hybrid_sigma_pressure_coordinate':['a', 'b', 'ps', 'p0'], - 'atmosphere_hybrid_height_coordinate':['a', 'b', 'orog'], - 'atmosphere_sleve_coordinate':['a', 'b1', 'b2', 'ztop', 'zsurf1', 'zsurf2'], - 'ocean_sigma_coordinate':['sigma', 'eta', 'depth'], - 'ocean_s_coordinate':['s', 'eta', 'depth', 'a', 'b', 'depth_c'], - 'ocean_sigma_z_coordinate':['sigma', 'eta', 'depth', 'depth_c', 'nsigma', 'zlev'], - 'ocean_double_sigma_coordinate':['sigma', 'depth', 'z1', 'z2', 'a', 'href', 'k_c'], - 'ocean_s_coordinate_g1':['s', 'eta', 'depth', 'depth_c', 'C'], - 'ocean_s_coordinate_g2':['s', 'eta', 'depth', 'depth_c', 'C']} - - # - # CF Grid Mappings - # - CF_GRID_MAPPING_ALBERS = 'albers_conical_equal_area' - CF_GRID_MAPPING_AZIMUTHAL = 'azimuthal_equidistant' - CF_GRID_MAPPING_LAMBERT_AZIMUTHAL = 'lambert_azimuthal_equal_area' - CF_GRID_MAPPING_LAMBERT_CONFORMAL = 'lambert_conformal_conic' - CF_GRID_MAPPING_LAMBERT_CYLINDRICAL = 'lambert_cylindrical_equal_area' - CF_GRID_MAPPING_LAT_LON = 'latitude_longitude' - CF_GRID_MAPPING_MERCATOR = 'mercator' - CF_GRID_MAPPING_ORTHO = 'orthographic' - CF_GRID_MAPPING_POLAR = 'polar_stereographic' - CF_GRID_MAPPING_ROTATED_LAT_LON = 'rotated_latitude_longitude' - CF_GRID_MAPPING_STEREO = 'stereographic' - CF_GRID_MAPPING_TRANSVERSE = 'transverse_mercator' - CF_GRID_MAPPING_VERTICAL = 'vertical_perspective' - CF_GRID_MAPPING_GEOSTATIONARY = 'geostationary' - - # - # CF Attribute Names. 
- # - CF_ATTR_AXIS = 'axis' - CF_ATTR_BOUNDS = 'bounds' - CF_ATTR_CALENDAR = 'calendar' - CF_ATTR_CLIMATOLOGY = 'climatology' - CF_ATTR_GRID_INVERSE_FLATTENING = 'inverse_flattening' - CF_ATTR_GRID_EARTH_RADIUS = 'earth_radius' - CF_ATTR_GRID_MAPPING_NAME = 'grid_mapping_name' - CF_ATTR_GRID_NORTH_POLE_LAT = 'grid_north_pole_latitude' - CF_ATTR_GRID_NORTH_POLE_LON = 'grid_north_pole_longitude' - CF_ATTR_GRID_NORTH_POLE_GRID_LON = 'north_pole_grid_longitude' - CF_ATTR_GRID_SEMI_MAJOR_AXIS = 'semi_major_axis' - CF_ATTR_GRID_SEMI_MINOR_AXIS = 'semi_minor_axis' - CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN = 'latitude_of_projection_origin' - CF_ATTR_GRID_LON_OF_PROJ_ORIGIN = 'longitude_of_projection_origin' - CF_ATTR_GRID_STANDARD_PARALLEL = 'standard_parallel' - CF_ATTR_GRID_FALSE_EASTING = 'false_easting' - CF_ATTR_GRID_FALSE_NORTHING = 'false_northing' - CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN = 'scale_factor_at_projection_origin' - CF_ATTR_GRID_SCALE_FACTOR_AT_CENT_MERIDIAN = 'scale_factor_at_central_meridian' - CF_ATTR_GRID_LON_OF_CENT_MERIDIAN = 'longitude_of_central_meridian' - CF_ATTR_GRID_STANDARD_PARALLEL = 'standard_parallel' - CF_ATTR_GRID_PERSPECTIVE_HEIGHT = 'perspective_point_height' - CF_ATTR_GRID_SWEEP_ANGLE_AXIS = 'sweep_angle_axis' - CF_ATTR_POSITIVE = 'positive' - CF_ATTR_STD_NAME = 'standard_name' - CF_ATTR_LONG_NAME = 'long_name' - CF_ATTR_UNITS = 'units' - CF_ATTR_CELL_METHODS = 'cell_methods' - - # - # CF Attribute Value Constants. - # - # Attribute - axis. - CF_VALUE_AXIS_X = 'x' - CF_VALUE_AXIS_Y = 'y' - CF_VALUE_AXIS_T = 't' - CF_VALUE_AXIS_Z = 'z' - - - # Attribute - positive. - CF_VALUE_POSITIVE = ['down', 'up'] - - # Attribute - standard_name. - CF_VALUE_STD_NAME_LAT = 'latitude' - CF_VALUE_STD_NAME_LON = 'longitude' - CF_VALUE_STD_NAME_GRID_LAT = 'grid_latitude' - CF_VALUE_STD_NAME_GRID_LON = 'grid_longitude' - CF_VALUE_STD_NAME_PROJ_X = 'projection_x_coordinate' - CF_VALUE_STD_NAME_PROJ_Y = 'projection_y_coordinate' - - - ################################################################################ - def build_cube_metadata(engine): - """Add the standard meta data to the cube.""" - - cf_var = engine.cf_var - cube = engine.cube - - # Determine the cube's name attributes - cube.var_name = cf_var.cf_name - standard_name = getattr(cf_var, CF_ATTR_STD_NAME, None) - long_name = getattr(cf_var, CF_ATTR_LONG_NAME, None) - cube.long_name = long_name - - if standard_name is not None: - try: - cube.standard_name = _get_valid_standard_name(standard_name) - except ValueError: - if cube.long_name is not None: - cube.attributes['invalid_standard_name'] = standard_name - else: - cube.long_name = standard_name - - # Determine the cube units. - attr_units = get_attr_units(cf_var, cube.attributes) - cube.units = attr_units - - # Incorporate cell methods - nc_att_cell_methods = getattr(cf_var, CF_ATTR_CELL_METHODS, None) - with warnings.catch_warnings(record=True) as warning_records: - cube.cell_methods = parse_cell_methods(nc_att_cell_methods) - # Filter to get the warning we are interested in. - warning_records = [record for record in warning_records - if issubclass(record.category, UnknownCellMethodWarning)] - if len(warning_records) > 0: - # Output an enhanced warning message. - warn_record = warning_records[0] - name = '{}'.format(cf_var.cf_name) - msg = warn_record.message.args[0] - msg = msg.replace('variable', 'variable {!r}'.format(name)) - warnings.warn(message=msg, category=UnknownCellMethodWarning) - - # Set the cube global attributes. 
- for attr_name, attr_value in cf_var.cf_group.global_attributes.items(): - try: - cube.attributes[str(attr_name)] = attr_value - except ValueError as e: - msg = 'Skipping global attribute {!r}: {}' - warnings.warn(msg.format(attr_name, str(e))) - - - - ################################################################################ - def _get_ellipsoid(cf_grid_var): - """Return the ellipsoid definition.""" - major = getattr(cf_grid_var, CF_ATTR_GRID_SEMI_MAJOR_AXIS, None) - minor = getattr(cf_grid_var, CF_ATTR_GRID_SEMI_MINOR_AXIS, None) - inverse_flattening = getattr(cf_grid_var, CF_ATTR_GRID_INVERSE_FLATTENING, None) - - # Avoid over-specification exception. - if major is not None and minor is not None: - inverse_flattening = None - - # Check for a default spherical earth. - if major is None and minor is None and inverse_flattening is None: - major = getattr(cf_grid_var, CF_ATTR_GRID_EARTH_RADIUS, None) - - return major, minor, inverse_flattening - - - ################################################################################ - def build_coordinate_system(cf_grid_var): - """Create a coordinate system from the CF-netCDF grid mapping variable.""" - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) - - return iris.coord_systems.GeogCS(major, minor, inverse_flattening) - - - ################################################################################ - def build_rotated_coordinate_system(engine, cf_grid_var): - """Create a rotated coordinate system from the CF-netCDF grid mapping variable.""" - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) - - north_pole_latitude = getattr(cf_grid_var, CF_ATTR_GRID_NORTH_POLE_LAT, 90.0) - north_pole_longitude = getattr(cf_grid_var, CF_ATTR_GRID_NORTH_POLE_LON, 0.0) - if north_pole_latitude is None or north_pole_longitude is None: - warnings.warn('Rotated pole position is not fully specified') - - north_pole_grid_lon = getattr(cf_grid_var, CF_ATTR_GRID_NORTH_POLE_GRID_LON, 0.0) - - ellipsoid = None - if major is not None or minor is not None or inverse_flattening is not None: - ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) - - rcs = iris.coord_systems.RotatedGeogCS(north_pole_latitude, north_pole_longitude, - north_pole_grid_lon, ellipsoid) - - return rcs - - - ################################################################################ - def build_transverse_mercator_coordinate_system(engine, cf_grid_var): - """ - Create a transverse Mercator coordinate system from the CF-netCDF - grid mapping variable. - - """ - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) - - latitude_of_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None) - longitude_of_central_meridian = getattr( - cf_grid_var, CF_ATTR_GRID_LON_OF_CENT_MERIDIAN, None) - false_easting = getattr( - cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) - false_northing = getattr( - cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) - scale_factor_at_central_meridian = getattr( - cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_CENT_MERIDIAN, None) - - # The following accounts for the inconsistancy in the transverse - # mercator description within the CF spec. 
- if longitude_of_central_meridian is None: - longitude_of_central_meridian = getattr( - cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None) - if scale_factor_at_central_meridian is None: - scale_factor_at_central_meridian = getattr( - cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None) - - ellipsoid = None - if major is not None or minor is not None or \ - inverse_flattening is not None: - ellipsoid = iris.coord_systems.GeogCS(major, minor, - inverse_flattening) - - cs = iris.coord_systems.TransverseMercator( - latitude_of_projection_origin, longitude_of_central_meridian, - false_easting, false_northing, scale_factor_at_central_meridian, - ellipsoid) - - return cs - - ################################################################################ - def build_lambert_conformal_coordinate_system(engine, cf_grid_var): - """ - Create a Lambert conformal conic coordinate system from the CF-netCDF - grid mapping variable. - - """ - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) - - latitude_of_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None) - longitude_of_central_meridian = getattr( - cf_grid_var, CF_ATTR_GRID_LON_OF_CENT_MERIDIAN, None) - false_easting = getattr( - cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) - false_northing = getattr( - cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) - standard_parallel = getattr( - cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None) - - ellipsoid = None - if major is not None or minor is not None or \ - inverse_flattening is not None: - ellipsoid = iris.coord_systems.GeogCS(major, minor, - inverse_flattening) - - cs = iris.coord_systems.LambertConformal( - latitude_of_projection_origin, longitude_of_central_meridian, - false_easting, false_northing, standard_parallel, - ellipsoid) - - return cs - - ################################################################################ - def build_stereographic_coordinate_system(engine, cf_grid_var): - """ - Create a stereographic coordinate system from the CF-netCDF - grid mapping variable. - - """ - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) - - latitude_of_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None) - longitude_of_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None) - false_easting = getattr( - cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) - false_northing = getattr( - cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) - # Iris currently only supports Stereographic projections with a scale - # factor of 1.0. This is checked elsewhere. - - ellipsoid = None - if major is not None or minor is not None or \ - inverse_flattening is not None: - ellipsoid = iris.coord_systems.GeogCS(major, minor, - inverse_flattening) - - cs = iris.coord_systems.Stereographic( - latitude_of_projection_origin, longitude_of_projection_origin, - false_easting, false_northing, - true_scale_lat=None, - ellipsoid=ellipsoid) - - return cs - - ################################################################################ - def build_mercator_coordinate_system(engine, cf_grid_var): - """ - Create a Mercator coordinate system from the CF-netCDF - grid mapping variable. 
- - """ - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) - - longitude_of_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None) - # Iris currently only supports Mercator projections with specific - # values for false_easting, false_northing, - # scale_factor_at_projection_origin and standard_parallel. These are - # checked elsewhere. - - ellipsoid = None - if major is not None or minor is not None or \ - inverse_flattening is not None: - ellipsoid = iris.coord_systems.GeogCS(major, minor, - inverse_flattening) - - cs = iris.coord_systems.Mercator( - longitude_of_projection_origin, - ellipsoid=ellipsoid) - - return cs - - - ################################################################################ - def build_lambert_azimuthal_equal_area_coordinate_system(engine, cf_grid_var): - """ - Create a lambert azimuthal equal area coordinate system from the CF-netCDF - grid mapping variable. - - """ - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) - - latitude_of_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None) - longitude_of_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None) - false_easting = getattr( - cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) - false_northing = getattr( - cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) - - ellipsoid = None - if major is not None or minor is not None or \ - inverse_flattening is not None: - ellipsoid = iris.coord_systems.GeogCS(major, minor, - inverse_flattening) - - cs = iris.coord_systems.LambertAzimuthalEqualArea( - latitude_of_projection_origin, longitude_of_projection_origin, - false_easting, false_northing, ellipsoid) - - return cs - - ################################################################################ - def build_albers_equal_area_coordinate_system(engine, cf_grid_var): - """ - Create a albers conical equal area coordinate system from the CF-netCDF - grid mapping variable. - - """ - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) - - latitude_of_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None) - longitude_of_central_meridian = getattr( - cf_grid_var, CF_ATTR_GRID_LON_OF_CENT_MERIDIAN, None) - false_easting = getattr( - cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) - false_northing = getattr( - cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) - standard_parallels = getattr( - cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None) - - ellipsoid = None - if major is not None or minor is not None or \ - inverse_flattening is not None: - ellipsoid = iris.coord_systems.GeogCS(major, minor, - inverse_flattening) - - cs = iris.coord_systems.AlbersEqualArea( - latitude_of_projection_origin, longitude_of_central_meridian, - false_easting, false_northing, standard_parallels, ellipsoid) - - return cs - - ################################################################################ - def build_vertical_perspective_coordinate_system(engine, cf_grid_var): - """ - Create a vertical perspective coordinate system from the CF-netCDF - grid mapping variable. 
- - """ - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) - - latitude_of_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None) - longitude_of_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None) - perspective_point_height = getattr( - cf_grid_var, CF_ATTR_GRID_PERSPECTIVE_HEIGHT, None) - false_easting = getattr( - cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) - false_northing = getattr( - cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) - - ellipsoid = None - if major is not None or minor is not None or \ - inverse_flattening is not None: - ellipsoid = iris.coord_systems.GeogCS(major, minor, - inverse_flattening) - - cs = iris.coord_systems.VerticalPerspective( - latitude_of_projection_origin, longitude_of_projection_origin, - perspective_point_height, false_easting, false_northing, ellipsoid) - - return cs - - ################################################################################ - def build_geostationary_coordinate_system(engine, cf_grid_var): - """ - Create a geostationary coordinate system from the CF-netCDF - grid mapping variable. - - """ - major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) - - latitude_of_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None) - longitude_of_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None) - perspective_point_height = getattr( - cf_grid_var, CF_ATTR_GRID_PERSPECTIVE_HEIGHT, None) - false_easting = getattr( - cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) - false_northing = getattr( - cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) - sweep_angle_axis = getattr( - cf_grid_var, CF_ATTR_GRID_SWEEP_ANGLE_AXIS, None) - - ellipsoid = None - if major is not None or minor is not None or \ - inverse_flattening is not None: - ellipsoid = iris.coord_systems.GeogCS(major, minor, - inverse_flattening) - - cs = iris.coord_systems.Geostationary( - latitude_of_projection_origin, longitude_of_projection_origin, - perspective_point_height, sweep_angle_axis, false_easting, - false_northing, ellipsoid) - - return cs - - ################################################################################ - def get_attr_units(cf_var, attributes): - attr_units = getattr(cf_var, CF_ATTR_UNITS, UNKNOWN_UNIT_STRING) - if not attr_units: - attr_units = UNKNOWN_UNIT_STRING - - # Sanitise lat/lon units. - if attr_units in UD_UNITS_LAT or attr_units in UD_UNITS_LON: - attr_units = 'degrees' - - # Graceful loading of invalid units. - try: - cf_units.as_unit(attr_units) - except ValueError: - # Using converted unicode message. Can be reverted with Python 3. - msg = u'Ignoring netCDF variable {!r} invalid units {!r}'.format( - cf_var.cf_name, attr_units) - warnings.warn(msg) - attributes['invalid_units'] = attr_units - attr_units = UNKNOWN_UNIT_STRING - - if np.issubdtype(cf_var.dtype, np.str_): - attr_units = NO_UNIT_STRING - - if any(hasattr(cf_var.cf_data, name) for name in ("flag_values", "flag_masks", "flag_meanings")): - attr_units = cf_units._NO_UNIT_STRING - - # Get any assoicated calendar for a time reference coordinate. 
- if cf_units.as_unit(attr_units).is_time_reference(): - attr_calendar = getattr(cf_var, CF_ATTR_CALENDAR, None) - - if attr_calendar: - attr_units = cf_units.Unit(attr_units, calendar=attr_calendar) - - return attr_units - - - ################################################################################ - def get_names(cf_coord_var, coord_name, attributes): - """Determine the standard_name, long_name and var_name attributes.""" - - standard_name = getattr(cf_coord_var, CF_ATTR_STD_NAME, None) - long_name = getattr(cf_coord_var, CF_ATTR_LONG_NAME, None) - cf_name = str(cf_coord_var.cf_name) - - if standard_name is not None: - try: - standard_name = _get_valid_standard_name(standard_name) - except ValueError: - if long_name is not None: - attributes['invalid_standard_name'] = standard_name - if coord_name is not None: - standard_name = coord_name - else: - standard_name = None - else: - if coord_name is not None: - attributes['invalid_standard_name'] = standard_name - standard_name = coord_name - else: - standard_name = None - - else: - if coord_name is not None: - standard_name = coord_name - - # Last attempt to set the standard name to something meaningful. - if standard_name is None: - if cf_name in iris.std_names.STD_NAMES: - standard_name = cf_name - - return (standard_name, long_name, cf_name) - - - ################################################################################ - def get_cf_bounds_var(cf_coord_var): - """ - Return the CF variable representing the bounds of a coordinate - variable. - - """ - attr_bounds = getattr(cf_coord_var, CF_ATTR_BOUNDS, None) - attr_climatology = getattr(cf_coord_var, CF_ATTR_CLIMATOLOGY, None) - - # Determine bounds, prefering standard bounds over climatology. - # NB. No need to raise a warning if the bounds/climatology - # variable is missing, as that will already have been done by - # iris.fileformats.cf. - cf_bounds_var = None - climatological = False - if attr_bounds is not None: - bounds_vars = cf_coord_var.cf_group.bounds - if attr_bounds in bounds_vars: - cf_bounds_var = bounds_vars[attr_bounds] - elif attr_climatology is not None: - climatology_vars = cf_coord_var.cf_group.climatology - if attr_climatology in climatology_vars: - cf_bounds_var = climatology_vars[attr_climatology] - climatological = True - - if attr_bounds is not None and attr_climatology is not None: - warnings.warn('Ignoring climatology in favour of bounds attribute ' - 'on NetCDF variable {!r}.'.format( - cf_coord_var.cf_name)) - - return cf_bounds_var, climatological - - - ################################################################################ - def reorder_bounds_data(bounds_data, cf_bounds_var, cf_coord_var): - """ - Return a bounds_data array with the vertex dimension as the most - rapidly varying. - - .. note:: - - This function assumes the dimension names of the coordinate - variable match those of the bounds variable in order to determine - which is the vertex dimension. - - - """ - vertex_dim_names = set(cf_bounds_var.dimensions).difference( - cf_coord_var.dimensions) - if len(vertex_dim_names) != 1: - msg = 'Too many dimension names differ between coordinate ' \ - 'variable {!r} and the bounds variable {!r}. ' \ - 'Expected 1, got {}.' 
- raise ValueError(msg.format(str(cf_coord_var.cf_name), - str(cf_bounds_var.cf_name), - len(vertex_dim_names))) - vertex_dim = cf_bounds_var.dimensions.index(*vertex_dim_names) - bounds_data = np.rollaxis(bounds_data.view(), vertex_dim, - len(bounds_data.shape)) - return bounds_data - - - ################################################################################ - def build_dimension_coordinate(engine, cf_coord_var, coord_name=None, coord_system=None): - """Create a dimension coordinate (DimCoord) and add it to the cube.""" - - cf_var = engine.cf_var - cube = engine.cube - attributes = {} - - attr_units = get_attr_units(cf_coord_var, attributes) - points_data = cf_coord_var[:] - # Gracefully fill points masked array. - if ma.is_masked(points_data): - points_data = ma.filled(points_data) - msg = 'Gracefully filling {!r} dimension coordinate masked points' - warnings.warn(msg.format(str(cf_coord_var.cf_name))) - - # Get any coordinate bounds. - cf_bounds_var, climatological = get_cf_bounds_var( - cf_coord_var) - if cf_bounds_var is not None: - bounds_data = cf_bounds_var[:] - # Gracefully fill bounds masked array. - if ma.is_masked(bounds_data): - bounds_data = ma.filled(bounds_data) - msg = 'Gracefully filling {!r} dimension coordinate masked bounds' - warnings.warn(msg.format(str(cf_coord_var.cf_name))) - # Handle transposed bounds where the vertex dimension is not - # the last one. Test based on shape to support different - # dimension names. - if cf_bounds_var.shape[:-1] != cf_coord_var.shape: - bounds_data = reorder_bounds_data(bounds_data, cf_bounds_var, - cf_coord_var) - else: - bounds_data = None - - # Determine whether the coordinate is circular. - circular = False - if points_data.ndim == 1 and coord_name in [CF_VALUE_STD_NAME_LON, CF_VALUE_STD_NAME_GRID_LON] \ - and cf_units.Unit(attr_units) in [cf_units.Unit('radians'), cf_units.Unit('degrees')]: - modulus_value = cf_units.Unit(attr_units).modulus - circular = iris.util._is_circular(points_data, modulus_value, bounds=bounds_data) - - # Determine the name of the dimension/s shared between the CF-netCDF data variable - # and the coordinate being built. - common_dims = [dim for dim in cf_coord_var.dimensions - if dim in cf_var.dimensions] - data_dims = None - if common_dims: - # Calculate the offset of each common dimension. - data_dims = [cf_var.dimensions.index(dim) for dim in common_dims] - - # Determine the standard_name, long_name and var_name - standard_name, long_name, var_name = get_names(cf_coord_var, coord_name, attributes) - - # Create the coordinate. - try: - coord = iris.coords.DimCoord(points_data, - standard_name=standard_name, - long_name=long_name, - var_name=var_name, - units=attr_units, - bounds=bounds_data, - attributes=attributes, - coord_system=coord_system, - circular=circular, - climatological= - climatological) - except ValueError as e_msg: - # Attempt graceful loading. - coord = iris.coords.AuxCoord(points_data, - standard_name=standard_name, - long_name=long_name, - var_name=var_name, - units=attr_units, - bounds=bounds_data, - attributes=attributes, - coord_system=coord_system, - climatological= - climatological) - cube.add_aux_coord(coord, data_dims) - msg = 'Failed to create {name!r} dimension coordinate: {error}\n' \ - 'Gracefully creating {name!r} auxiliary coordinate instead.' - warnings.warn(msg.format(name=str(cf_coord_var.cf_name), - error=e_msg)) - else: - # Add the dimension coordinate to the cube. 
- if data_dims: - cube.add_dim_coord(coord, data_dims) - else: - # Scalar coords are placed in the aux_coords container. - cube.add_aux_coord(coord, data_dims) - - # Update the coordinate to CF-netCDF variable mapping. - engine.cube_parts['coordinates'].append((coord, cf_coord_var.cf_name)) - - - ################################################################################ - def build_auxiliary_coordinate(engine, cf_coord_var, coord_name=None, coord_system=None): - """Create an auxiliary coordinate (AuxCoord) and add it to the cube.""" - - cf_var = engine.cf_var - cube = engine.cube - attributes = {} - - # Get units - attr_units = get_attr_units(cf_coord_var, attributes) - - # Get any coordinate point data. - if isinstance(cf_coord_var, cf.CFLabelVariable): - points_data = cf_coord_var.cf_label_data(cf_var) - else: - points_data = _get_cf_var_data(cf_coord_var, engine.filename) - - # Get any coordinate bounds. - cf_bounds_var, climatological = get_cf_bounds_var( - cf_coord_var) - if cf_bounds_var is not None: - bounds_data = _get_cf_var_data(cf_bounds_var, engine.filename) - - # Handle transposed bounds where the vertex dimension is not - # the last one. Test based on shape to support different - # dimension names. - if cf_bounds_var.shape[:-1] != cf_coord_var.shape: - # Resolving the data to a numpy array (i.e. *not* masked) for - # compatibility with array creators (i.e. dask) - bounds_data = np.asarray(bounds_data) - bounds_data = reorder_bounds_data(bounds_data, cf_bounds_var, - cf_coord_var) - else: - bounds_data = None - - # Determine the name of the dimension/s shared between the CF-netCDF data variable - # and the coordinate being built. - common_dims = [dim for dim in cf_coord_var.dimensions - if dim in cf_var.dimensions] - data_dims = None - if common_dims: - # Calculate the offset of each common dimension. - data_dims = [cf_var.dimensions.index(dim) for dim in common_dims] - - # Determine the standard_name, long_name and var_name - standard_name, long_name, var_name = get_names(cf_coord_var, coord_name, attributes) - - # Create the coordinate - coord = iris.coords.AuxCoord(points_data, - standard_name=standard_name, - long_name=long_name, - var_name=var_name, - units=attr_units, - bounds=bounds_data, - attributes=attributes, - coord_system=coord_system, - climatological= - climatological) - - # Add it to the cube - cube.add_aux_coord(coord, data_dims) - - # Make a list with names, stored on the engine, so we can find them all later. - engine.cube_parts['coordinates'].append((coord, cf_coord_var.cf_name)) - - - ################################################################################ - def build_cell_measures(engine, cf_cm_var): - """Create a CellMeasure instance and add it to the cube.""" - cf_var = engine.cf_var - cube = engine.cube - attributes = {} - - # Get units - attr_units = get_attr_units(cf_cm_var, attributes) - - # Get (lazy) content array - data = _get_cf_var_data(cf_cm_var, engine.filename) - - # Determine the name of the dimension/s shared between the CF-netCDF data variable - # and the coordinate being built. - common_dims = [dim for dim in cf_cm_var.dimensions - if dim in cf_var.dimensions] - data_dims = None - if common_dims: - # Calculate the offset of each common dimension. - data_dims = [cf_var.dimensions.index(dim) for dim in common_dims] - - # Determine the standard_name, long_name and var_name - standard_name, long_name, var_name = get_names(cf_cm_var, None, attributes) - - # Obtain the cf_measure. 
- measure = cf_cm_var.cf_measure - - # Create the CellMeasure - cell_measure = iris.coords.CellMeasure(data, - standard_name=standard_name, - long_name=long_name, - var_name=var_name, - units=attr_units, - attributes=attributes, - measure=measure) - - # Add it to the cube - cube.add_cell_measure(cell_measure, data_dims) - - # Make a list with names, stored on the engine, so we can find them all later. - engine.cube_parts['cell_measures'].append((cell_measure, cf_cm_var.cf_name)) - - - - ################################################################################ - def build_ancil_var(engine, cf_av_var): - """Create an AncillaryVariable instance and add it to the cube.""" - cf_var = engine.cf_var - cube = engine.cube - attributes = {} - - # Get units - attr_units = get_attr_units(cf_av_var, attributes) - - # Get (lazy) content array - data = _get_cf_var_data(cf_av_var, engine.filename) - - # Determine the name of the dimension/s shared between the CF-netCDF data variable - # and the AV being built. - common_dims = [dim for dim in cf_av_var.dimensions - if dim in cf_var.dimensions] - data_dims = None - if common_dims: - # Calculate the offset of each common dimension. - data_dims = [cf_var.dimensions.index(dim) for dim in common_dims] - - # Determine the standard_name, long_name and var_name - standard_name, long_name, var_name = get_names(cf_av_var, None, attributes) - - # Create the AncillaryVariable - av = iris.coords.AncillaryVariable( - data, - standard_name=standard_name, - long_name=long_name, - var_name=var_name, - units=attr_units, - attributes=attributes) - - # Add it to the cube - cube.add_ancillary_variable(av, data_dims) - - # Make a list with names, stored on the engine, so we can find them all later. - engine.cube_parts['ancillary_variables'].append((av, cf_av_var.cf_name)) - - - - ################################################################################ - def _is_lat_lon(cf_var, ud_units, std_name, std_name_grid, axis_name, prefixes): - """ - Determine whether the CF coordinate variable is a latitude/longitude variable. - - Ref: [CF] Section 4.1 Latitude Coordinate. - [CF] Section 4.2 Longitude Coordinate. - - """ - is_valid = False - attr_units = getattr(cf_var, CF_ATTR_UNITS, None) - - if attr_units is not None: - attr_units = attr_units.lower() - is_valid = attr_units in ud_units - - # Special case - Check for rotated pole. - if attr_units == 'degrees': - attr_std_name = getattr(cf_var, CF_ATTR_STD_NAME, None) - if attr_std_name is not None: - is_valid = attr_std_name.lower() == std_name_grid - else: - is_valid = False - # TODO: check that this interpretation of axis is correct. - attr_axis = getattr(cf_var, CF_ATTR_AXIS, None) - if attr_axis is not None: - is_valid = attr_axis.lower() == axis_name - else: - # Alternative is to check standard_name or axis. 
- attr_std_name = getattr(cf_var, CF_ATTR_STD_NAME, None) - - if attr_std_name is not None: - attr_std_name = attr_std_name.lower() - is_valid = attr_std_name in [std_name, std_name_grid] - if not is_valid: - is_valid = any([attr_std_name.startswith(prefix) for prefix in prefixes]) - else: - attr_axis = getattr(cf_var, CF_ATTR_AXIS, None) - - if attr_axis is not None: - is_valid = attr_axis.lower() == axis_name - - return is_valid - - - ################################################################################ - def is_latitude(engine, cf_name): - """Determine whether the CF coordinate variable is a latitude variable.""" - cf_var = engine.cf_var.cf_group[cf_name] - return _is_lat_lon(cf_var, UD_UNITS_LAT, CF_VALUE_STD_NAME_LAT, - CF_VALUE_STD_NAME_GRID_LAT, CF_VALUE_AXIS_Y, ['lat', 'rlat']) - - - ################################################################################ - def is_longitude(engine, cf_name): - """Determine whether the CF coordinate variable is a longitude variable.""" - cf_var = engine.cf_var.cf_group[cf_name] - return _is_lat_lon(cf_var, UD_UNITS_LON, CF_VALUE_STD_NAME_LON, - CF_VALUE_STD_NAME_GRID_LON, CF_VALUE_AXIS_X, ['lon', 'rlon']) - - - ################################################################################ - def is_projection_x_coordinate(engine, cf_name): - """ - Determine whether the CF coordinate variable is a - projection_x_coordinate variable. - - """ - cf_var = engine.cf_var.cf_group[cf_name] - attr_name = getattr(cf_var, CF_ATTR_STD_NAME, None) or \ - getattr(cf_var, CF_ATTR_LONG_NAME, None) - return attr_name == CF_VALUE_STD_NAME_PROJ_X - - - ################################################################################ - def is_projection_y_coordinate(engine, cf_name): - """ - Determine whether the CF coordinate variable is a - projection_y_coordinate variable. - - """ - cf_var = engine.cf_var.cf_group[cf_name] - attr_name = getattr(cf_var, CF_ATTR_STD_NAME, None) or \ - getattr(cf_var, CF_ATTR_LONG_NAME, None) - return attr_name == CF_VALUE_STD_NAME_PROJ_Y - - - ################################################################################ - def is_time(engine, cf_name): - """ - Determine whether the CF coordinate variable is a time variable. - - Ref: [CF] Section 4.4 Time Coordinate. 
- - """ - is_valid = False - cf_var = engine.cf_var.cf_group[cf_name] - attr_units = getattr(cf_var, CF_ATTR_UNITS, None) - - attr_std_name = getattr(cf_var, CF_ATTR_STD_NAME, None) - attr_axis = getattr(cf_var, CF_ATTR_AXIS, '') - try: - is_time_reference = cf_units.Unit(attr_units or 1).is_time_reference() - except ValueError: - is_time_reference = False - - return is_time_reference and (attr_std_name=='time' or attr_axis.lower()==CF_VALUE_AXIS_T) - - - ################################################################################ - def is_time_period(engine, cf_name): - """Determine whether the CF coordinate variable represents a time period.""" - is_valid = False - cf_var = engine.cf_var.cf_group[cf_name] - attr_units = getattr(cf_var, CF_ATTR_UNITS, None) - - if attr_units is not None: - try: - is_valid = cf_units.is_time(attr_units) - except ValueError: - is_valid = False - - return is_valid - - - ################################################################################ - def is_grid_mapping(engine, cf_name, grid_mapping): - """Determine whether the CF grid mapping variable is of the appropriate type.""" - - is_valid = False - cf_var = engine.cf_var.cf_group[cf_name] - attr_mapping_name = getattr(cf_var, CF_ATTR_GRID_MAPPING_NAME, None) - - if attr_mapping_name is not None: - is_valid = attr_mapping_name.lower() == grid_mapping - - return is_valid - - - ################################################################################ - def _is_rotated(engine, cf_name, cf_attr_value): - """Determine whether the CF coordinate variable is rotated.""" - - is_valid = False - cf_var = engine.cf_var.cf_group[cf_name] - attr_std_name = getattr(cf_var, CF_ATTR_STD_NAME, None) - - if attr_std_name is not None: - is_valid = attr_std_name.lower() == cf_attr_value - else: - attr_units = getattr(cf_var, CF_ATTR_UNITS, None) - if attr_units is not None: - is_valid = attr_units.lower() == 'degrees' - - return is_valid - - - ################################################################################ - def is_rotated_latitude(engine, cf_name): - """Determine whether the CF coodinate variable is rotated latitude.""" - return _is_rotated(engine, cf_name, CF_VALUE_STD_NAME_GRID_LAT) - - - ############################################################################### - def is_rotated_longitude(engine, cf_name): - """Determine whether the CF coordinate variable is rotated longitude.""" - return _is_rotated(engine, cf_name, CF_VALUE_STD_NAME_GRID_LON) - - - ################################################################################ - def has_supported_mercator_parameters(engine, cf_name): - """Determine whether the CF grid mapping variable has the supported - values for the parameters of the Mercator projection.""" - - is_valid = True - cf_grid_var = engine.cf_var.cf_group[cf_name] - - false_easting = getattr( - cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) - false_northing = getattr( - cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) - scale_factor_at_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None) - standard_parallel = getattr( - cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None) - - if false_easting is not None and \ - false_easting != 0: - warnings.warn('False eastings other than 0.0 not yet supported ' - 'for Mercator projections') - is_valid = False - if false_northing is not None and \ - false_northing != 0: - warnings.warn('False northings other than 0.0 not yet supported ' - 'for Mercator projections') - is_valid = False - if 
scale_factor_at_projection_origin is not None and \ - scale_factor_at_projection_origin != 1: - warnings.warn('Scale factors other than 1.0 not yet supported for ' - 'Mercator projections') - is_valid = False - if standard_parallel is not None and \ - standard_parallel != 0: - warnings.warn('Standard parallels other than 0.0 not yet ' - 'supported for Mercator projections') - is_valid = False - - return is_valid - - - ################################################################################ - def has_supported_stereographic_parameters(engine, cf_name): - """Determine whether the CF grid mapping variable has a value of 1.0 - for the scale_factor_at_projection_origin attribute.""" - - is_valid = True - cf_grid_var = engine.cf_var.cf_group[cf_name] - - scale_factor_at_projection_origin = getattr( - cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None) - - if scale_factor_at_projection_origin is not None and \ - scale_factor_at_projection_origin != 1: - warnings.warn('Scale factors other than 1.0 not yet supported for ' - 'stereographic projections') - is_valid = False - - return is_valid - - - ################################################################################ - def _parse_cell_methods(cf_var_name, nc_cell_methods): - """Parse the CF cell_methods attribute string.""" - - cell_methods = [] - if nc_cell_methods is not None: - for m in CM_PARSE.finditer(nc_cell_methods): - d = m.groupdict() - method = d[CM_METHOD] - method = method.strip() - # Check validity of method, allowing for multi-part methods - # e.g. mean over years. - method_words = method.split() - if method_words[0].lower() not in CM_KNOWN_METHODS: - msg = 'NetCDF variable {!r} contains unknown cell ' \ - 'method {!r}' - warnings.warn(msg.format('{}'.format(cf_var_name), - '{}'.format(method_words[0]))) - d[CM_METHOD] = method - name = d[CM_NAME] - name = name.replace(' ', '') - name = name.rstrip(':') - d[CM_NAME] = tuple([n for n in name.split(':')]) - interval = [] - comment = [] - if d[CM_EXTRA] is not None: - # - # tokenise the key words and field colon marker - # - d[CM_EXTRA] = d[CM_EXTRA].replace('comment:', '<><<:>>') - d[CM_EXTRA] = d[CM_EXTRA].replace('interval:', '<><<:>>') - d[CM_EXTRA] = d[CM_EXTRA].split('<<:>>') - if len(d[CM_EXTRA]) == 1: - comment.extend(d[CM_EXTRA]) - else: - next_field_type = comment - for field in d[CM_EXTRA]: - field_type = next_field_type - index = field.rfind('<>') - if index == 0: - next_field_type = interval - continue - elif index > 0: - next_field_type = interval - else: - index = field.rfind('<>') - if index == 0: - next_field_type = comment - continue - elif index > 0: - next_field_type = comment - if index != -1: - field = field[:index] - field_type.append(field.strip()) - # - # cater for a shared interval over multiple axes - # - if len(interval): - if len(d[CM_NAME]) != len(interval) and len(interval) == 1: - interval = interval*len(d[CM_NAME]) - # - # cater for a shared comment over multiple axes - # - if len(comment): - if len(d[CM_NAME]) != len(comment) and len(comment) == 1: - comment = comment*len(d[CM_NAME]) - d[CM_INTERVAL] = tuple(interval) - d[CM_COMMENT] = tuple(comment) - cell_methods.append(iris.coords.CellMethod(d[CM_METHOD], coords=d[CM_NAME], intervals=d[CM_INTERVAL], comments=d[CM_COMMENT])) - return tuple(cell_methods) diff --git a/lib/iris/fileformats/netcdf.py b/lib/iris/fileformats/netcdf.py index 91099464b1..df62027350 100644 --- a/lib/iris/fileformats/netcdf.py +++ b/lib/iris/fileformats/netcdf.py @@ -25,7 +25,6 @@ import netCDF4 import 
numpy as np import numpy.ma as ma -from pyke import knowledge_engine from iris._lazy_data import as_lazy_data from iris.aux_factory import ( @@ -41,17 +40,13 @@ import iris.coord_systems import iris.coords import iris.exceptions -import iris.fileformats._pyke_rules import iris.fileformats.cf +import iris.io import iris.util -# Show Pyke inference engine statistics. +# Show actions activation statistics. DEBUG = False -# Pyke CF related file names. -_PYKE_RULE_BASE = "fc_rules_cf" -_PYKE_FACT_BASE = "facts_cf" - # Standard CML spatio-temporal axis names. SPATIO_TEMPORAL_AXES = ["t", "z", "y", "x"] @@ -381,49 +376,13 @@ def coord(self, name): return result -def _pyke_kb_engine_real(): - """Return the PyKE knowledge engine for CF->cube conversion.""" - - pyke_dir = os.path.join(os.path.dirname(__file__), "_pyke_rules") - compile_dir = os.path.join(pyke_dir, "compiled_krb") - engine = None - - if os.path.exists(compile_dir): - tmpvar = [ - os.path.getmtime(os.path.join(compile_dir, fname)) - for fname in os.listdir(compile_dir) - if not fname.startswith("_") - ] - if tmpvar: - oldest_pyke_compile_file = min(tmpvar) - rule_age = os.path.getmtime( - os.path.join(pyke_dir, _PYKE_RULE_BASE + ".krb") - ) - - if oldest_pyke_compile_file >= rule_age: - # Initialise the pyke inference engine. - engine = knowledge_engine.engine( - (None, "iris.fileformats._pyke_rules.compiled_krb") - ) - - if engine is None: - engine = knowledge_engine.engine(iris.fileformats._pyke_rules) +def _actions_engine(): + # Return an 'actions engine', which provides a pyke-rules-like interface to + # the core cf translation code. + # Deferred import to avoid circularity. + import iris.fileformats._nc_load_rules.engine as nc_actions_engine - return engine - - -LOAD_PYKE = True - - -def _pyke_kb_engine(): - """Return a knowledge engine, or replacement object.""" - if LOAD_PYKE: - engine = _pyke_kb_engine_real() - else: - # Deferred import to avoid circularity. - import iris.fileformats._nc_load_rules.engine as nonpyke_engine - - engine = nonpyke_engine.Engine() + engine = nc_actions_engine.Engine() return engine @@ -470,45 +429,36 @@ def __setstate__(self, state): def _assert_case_specific_facts(engine, cf, cf_group): - # Initialise pyke engine "provides" hooks. - # These are used to patch non-processed element attributes after rules activation. + # Initialise a data store for built cube elements. + # This is used to patch element attributes *not* setup by the actions + # process, after the actions code has run. engine.cube_parts["coordinates"] = [] engine.cube_parts["cell_measures"] = [] engine.cube_parts["ancillary_variables"] = [] # Assert facts for CF coordinates. for cf_name in cf_group.coordinates.keys(): - engine.add_case_specific_fact( - _PYKE_FACT_BASE, "coordinate", (cf_name,) - ) + engine.add_case_specific_fact("coordinate", (cf_name,)) # Assert facts for CF auxiliary coordinates. for cf_name in cf_group.auxiliary_coordinates.keys(): - engine.add_case_specific_fact( - _PYKE_FACT_BASE, "auxiliary_coordinate", (cf_name,) - ) + engine.add_case_specific_fact("auxiliary_coordinate", (cf_name,)) # Assert facts for CF cell measures. for cf_name in cf_group.cell_measures.keys(): - engine.add_case_specific_fact( - _PYKE_FACT_BASE, "cell_measure", (cf_name,) - ) + engine.add_case_specific_fact("cell_measure", (cf_name,)) # Assert facts for CF ancillary variables. 
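A minimal sketch, not code from this patch, of the slimmed-down fact interface these call sites imply, assuming the engine's add_case_specific_fact loses its Pyke knowledge-base name argument and takes just (fact_name, fact_arglist):

    from iris.fileformats._nc_load_rules.engine import Engine

    engine = Engine()
    engine.add_case_specific_fact("coordinate", ("time",))
    engine.add_case_specific_fact("coordinate", ("latitude",))
    # Facts are grouped per name as a list of argument tuples.
    print(engine.fact_list("coordinate"))   # [('time',), ('latitude',)]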
for cf_name in cf_group.ancillary_variables.keys(): - engine.add_case_specific_fact( - _PYKE_FACT_BASE, "ancillary_variable", (cf_name,) - ) + engine.add_case_specific_fact("ancillary_variable", (cf_name,)) # Assert facts for CF grid_mappings. for cf_name in cf_group.grid_mappings.keys(): - engine.add_case_specific_fact( - _PYKE_FACT_BASE, "grid_mapping", (cf_name,) - ) + engine.add_case_specific_fact("grid_mapping", (cf_name,)) # Assert facts for CF labels. for cf_name in cf_group.labels.keys(): - engine.add_case_specific_fact(_PYKE_FACT_BASE, "label", (cf_name,)) + engine.add_case_specific_fact("label", (cf_name,)) # Assert facts for CF formula terms associated with the cf_group # of the CF data variable. @@ -520,35 +470,31 @@ def _assert_case_specific_facts(engine, cf, cf_group): if cf_root in cf_group: formula_root.add(cf_root) engine.add_case_specific_fact( - _PYKE_FACT_BASE, "formula_term", (cf_var.cf_name, cf_root, cf_term), ) for cf_root in formula_root: - engine.add_case_specific_fact( - _PYKE_FACT_BASE, "formula_root", (cf_root,) - ) + engine.add_case_specific_fact("formula_root", (cf_root,)) -def _pyke_stats(engine, cf_name): - if DEBUG: - print("-" * 80) - print("CF Data Variable: %r" % cf_name) +def _actions_activation_stats(engine, cf_name): + print("-" * 80) + print("CF Data Variable: %r" % cf_name) - engine.print_stats() + engine.print_stats() - print("Rules Triggered:") + print("Rules Triggered:") - for rule in sorted(list(engine.rule_triggered)): - print("\t%s" % rule) + for rule in sorted(list(engine.rule_triggered)): + print("\t%s" % rule) - print("Case Specific Facts:") - kb_facts = engine.get_kb(_PYKE_FACT_BASE) + print("Case Specific Facts:") + kb_facts = engine.get_kb() - for key in kb_facts.entity_lists.keys(): - for arg in kb_facts.entity_lists[key].case_specific_facts: - print("\t%s%s" % (key, arg)) + for key in kb_facts.entity_lists.keys(): + for arg in kb_facts.entity_lists[key].case_specific_facts: + print("\t%s%s" % (key, arg)) def _set_attributes(attributes, key, value): @@ -614,10 +560,10 @@ def _load_cube(engine, cf, cf_var, filename): data = _get_cf_var_data(cf_var, filename) cube = Cube(data) - # Reset the pyke inference engine. + # Reset the actions engine. engine.reset() - # Initialise pyke engine rule processing hooks. + # Initialise engine rule processing hooks. engine.cf_var = cf_var engine.cube = cube engine.cube_parts = {} @@ -625,11 +571,15 @@ def _load_cube(engine, cf, cf_var, filename): engine.rule_triggered = OrderedAddableList() # set() engine.filename = filename - # Assert any case-specific facts. + # Assert all the case-specific facts. + # This extracts 'facts' specific to this data-variable (aka cube), from + # the info supplied in the CFGroup object. _assert_case_specific_facts(engine, cf, cf_var.cf_group) - # Run pyke inference engine with forward chaining rules. - engine.activate(_PYKE_RULE_BASE) + # Run the actions engine. + # This creates various cube elements and attaches them to the cube. + # It also records various other info on the engine, to be processed later. + engine.activate() # Having run the rules, now populate the attributes of all the cf elements with the # "unused" attributes from the associated CF-netCDF variable. @@ -676,8 +626,9 @@ def fix_attributes_all_elements(role_name): for method in cube.cell_methods ] - # Show pyke session statistics. - _pyke_stats(engine, cf_var.cf_name) + if DEBUG: + # Show activation statistics for this data-var (i.e. cube). 
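A small usage sketch of the debug path: with the module-level DEBUG flag switched on, each translated data variable prints its activation summary via _actions_activation_stats during loading ("example.nc" is a hypothetical filename):

    import iris.fileformats.netcdf as inetcdf

    inetcdf.DEBUG = True
    # load_cubes is a generator; realise it to trigger the per-variable output.
    cubes = list(inetcdf.load_cubes("example.nc"))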
+ _actions_activation_stats(engine, cf_var.cf_name) return cube @@ -816,8 +767,8 @@ def load_cubes(filenames, callback=None): """ from iris.io import run_callback - # Initialise the pyke inference engine. - engine = _pyke_kb_engine() + # Create an actions engine. + engine = _actions_engine() if isinstance(filenames, str): filenames = [filenames] diff --git a/lib/iris/tests/test_coding_standards.py b/lib/iris/tests/test_coding_standards.py index 5a89bf5e23..01f6f777fa 100644 --- a/lib/iris/tests/test_coding_standards.py +++ b/lib/iris/tests/test_coding_standards.py @@ -110,7 +110,6 @@ def test_license_headers(self): "docs/src/userguide/regridding_plots/*.py", "docs/src/_build/*", "lib/iris/analysis/_scipy_interpolate.py", - "lib/iris/fileformats/_pyke_rules/*", ) try: diff --git a/lib/iris/tests/test_netcdf.py b/lib/iris/tests/test_netcdf.py index 4d92274fcf..36e06202d1 100644 --- a/lib/iris/tests/test_netcdf.py +++ b/lib/iris/tests/test_netcdf.py @@ -30,7 +30,7 @@ import iris.analysis.trajectory import iris.coord_systems as icoord_systems from iris.coords import AncillaryVariable, CellMeasure -import iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc as pyke_rules +from iris.fileformats._nc_load_rules import helpers as ncload_helpers import iris.fileformats.netcdf from iris.fileformats.netcdf import load_cubes as nc_load_cubes import iris.std_names @@ -522,13 +522,19 @@ def test_lat_lon_major_minor(self): minor = 63567523 self.grid.semi_major_axis = major self.grid.semi_minor_axis = minor - crs = pyke_rules.build_coordinate_system(self.grid) + # NB 'build_coordinate_system' has an extra (unused) 'engine' arg, just + # so that it has the same signature as other coord builder routines. + engine = None + crs = ncload_helpers.build_coordinate_system(engine, self.grid) self.assertEqual(crs, icoord_systems.GeogCS(major, minor)) def test_lat_lon_earth_radius(self): earth_radius = 63700000 self.grid.earth_radius = earth_radius - crs = pyke_rules.build_coordinate_system(self.grid) + # NB 'build_coordinate_system' has an extra (unused) 'engine' arg, just + # so that it has the same signature as other coord builder routines. + engine = None + crs = ncload_helpers.build_coordinate_system(engine, self.grid) self.assertEqual(crs, icoord_systems.GeogCS(earth_radius)) diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/__init__.py similarity index 72% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/__init__.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/__init__.py index a8093f5c8c..2ea22c420b 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/__init__.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/__init__.py @@ -4,6 +4,7 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Unit tests for the :mod:`iris.fileformats._pyke_rules.compiled_krb` module. +Unit tests for the module +:mod:`iris.fileformats.netcdf._nc_load_rules` . 
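For illustration only, how the extra, unused 'engine' argument is supplied when a builder helper is called directly, mirroring the updated tests above; the bare mock grid variable and the radius value are invented for the sketch:

    from unittest import mock
    from iris.fileformats._nc_load_rules import helpers as ncload_helpers

    grid_var = mock.Mock(spec=[], earth_radius=6371000.0)
    crs = ncload_helpers.build_coordinate_system(None, grid_var)
    # crs should compare equal to iris.coord_systems.GeogCS(6371000.0).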
""" diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py similarity index 50% rename from lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py index a85baaf856..678195c6a1 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py @@ -4,14 +4,10 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Unit tests for the engine.activate() call within the -`iris.fileformats.netcdf._load_cube` function. +Unit tests for the module :mod:`iris.fileformats._nc_load_rules.actions`. -For now, these tests are designed to function with **either** the "old" -Pyke-rules implementation in :mod:`iris.fileformats._pyke_rules`, **or** the -"new" :mod:`iris.fileformats._nc_load_rules`. -Both of those supply an "engine" with an "activate" method - -- at least for now : may be simplified in future. +This module provides the engine.activate() call used in the function +`iris.fileformats.netcdf._load_cube`. """ from pathlib import Path @@ -19,8 +15,6 @@ import subprocess import tempfile -import numpy as np - import iris.fileformats._nc_load_rules.engine from iris.fileformats.cf import CFReader import iris.fileformats.netcdf @@ -31,8 +25,7 @@ IN cf : "def _load_cube(engine, cf, cf_var, filename)" WHERE: - - engine is a :class:`pyke.knowledge_engine.engine` - -- **OR** :class:`iris.fileformats._nc_load_rules.engine.Engine` + - engine is a :class:`iris.fileformats._nc_load_rules.engine.Engine` - cf is a CFReader - cf_var is a CFDAtaVariable @@ -72,21 +65,11 @@ class Mixin__nc_load_actions: """ - # - # "global" test settings - # - - # whether to test 'rules' or 'actions' implementations - # TODO: remove when Pyke is gone - use_pyke = True - - # whether to output various debug info + # "global" test setting : whether to output various debug info # TODO: ?possibly? remove when development is complete debug = False # whether to perform action in both ways and compare results. - compare_pyke_nonpyke = True - @classmethod def setUpClass(cls): # # Control which testing method we are applying. @@ -103,8 +86,6 @@ def load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): Load the 'phenom' data variable in a CDL testcase, as a cube. Using ncgen, CFReader and the _load_cube call. - Can use a genuine Pyke engine, or the actions mimic engine, - selected by `self.use_pyke`. """ # Write the CDL to a file. @@ -121,91 +102,29 @@ def load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): cf_var = list(cf.cf_group.data_variables.values())[0] cf_var = cf.cf_group.data_variables["phenom"] - do_pyke = self.use_pyke or self.compare_pyke_nonpyke - do_nonpyke = not self.use_pyke or self.compare_pyke_nonpyke - if do_pyke: - pyke_engine = iris.fileformats.netcdf._pyke_kb_engine_real() - if do_nonpyke: - nonpyke_engine = iris.fileformats._nc_load_rules.engine.Engine() + engine = iris.fileformats.netcdf._actions_engine() + # If debug enabled, switch on the activation summary debug output. iris.fileformats.netcdf.DEBUG = self.debug # Call the main translation function to load a single cube. - def translate_cube(engine): - # _load_cube establishes per-cube facts, activates rules and - # produces an actual cube. 
- cube = _load_cube(engine, cf, cf_var, nc_path) - - # Also Record, on the cubes, which hybrid coord elements were identified - # by the rules operation. - # Unlike the other translations, _load_cube does *not* convert this - # information into actual cube elements. That is instead done by - # `iris.fileformats.netcdf._load_aux_factory`. - # For rules testing, it is anyway more convenient to deal with the raw - # data, as each factory type has different validity requirements to - # build it, and none of that is relevant to the rules operation. - cube._formula_type_name = engine.requires.get("formula_type") - cube._formula_terms_byname = engine.requires.get("formula_terms") - - return cube - - if do_pyke: - pyke_cube = translate_cube(pyke_engine) - if do_nonpyke: - nonpyke_cube = translate_cube(nonpyke_engine) - - # If requested, directly compare the pyke and non-pyke outputs. - if self.compare_pyke_nonpyke: - # Compare the loaded cubes from both engines. - # print("\nPYKE-NONPYKE COMPARE") - - # Make a duplicate cube with un-masked cube data, as masked data - # does not compare well (i.e. cube1 == cube2 may yield 'masked' - # instead of a boolean). - def unmask_cube(cube): - # Make a copy, so that we can realise the data without - # modifying the original cube. - cube = cube.copy() - if isinstance(cube.data, np.ma.MaskedArray): - cube.data = cube.data.filled(0) - return cube - - pyke_cube_copy = unmask_cube(pyke_cube) - nonpyke_cube_copy = unmask_cube(nonpyke_cube) - if self.debug: - if nonpyke_cube_copy != pyke_cube_copy: - - def show_cube(cube): - result = str(cube) - result += "\n--coords--" - for coord in cube.coords(): - result += "\n " + str(coord) - result += "\n--attributes--" - if not cube.attributes: - result += "\n (none)" - else: - for key, value in cube.attributes.items(): - result += f"\n {key}: {value}" - return result - - print("\nPyke/nonpyke mismatch.") - print("Pyke cube:\n----") - print(show_cube(pyke_cube)) - print() - print("NONPyke cube:\n----") - print(show_cube(nonpyke_cube)) - print("") - else: - self.assertEqual(pyke_cube_copy, nonpyke_cube_copy) - - # Return the right thing, whether we did 'both' or not - if self.use_pyke: - result_cube = pyke_cube - else: - result_cube = nonpyke_cube + # _load_cube establishes per-cube facts, activates rules and + # produces an actual cube. + cube = _load_cube(engine, cf, cf_var, nc_path) + + # Also Record, on the cubes, which hybrid coord elements were identified + # by the rules operation. + # Unlike the other translations, _load_cube does *not* convert this + # information into actual cube elements. That is instead done by + # `iris.fileformats.netcdf._load_aux_factory`. + # For rules testing, it is anyway more convenient to deal with the raw + # data, as each factory type has different validity requirements to + # build it, and none of that is relevant to the rules operation. + cube._formula_type_name = engine.requires.get("formula_type") + cube._formula_terms_byname = engine.requires.get("formula_terms") # Always returns a single cube. 
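The removed comparison code above filled masked data before comparing cubes, because '==' on masked arrays can yield 'masked' rather than a boolean; a standalone numpy sketch of that idea:

    import numpy as np

    data = np.ma.masked_array([1.0, 2.0, 3.0], mask=[False, True, False])
    # Replace masked points so comparisons give plain booleans.
    comparable = data.filled(0) if isinstance(data, np.ma.MaskedArray) else data
    print(comparable)   # [1. 0. 3.]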
- return result_cube + return cube def run_testcase(self, warning=None, **testcase_kwargs): """ diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py similarity index 96% rename from lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py index e054baa431..ebea5acd45 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py @@ -10,10 +10,11 @@ Here, *specifically* testcases relating to grid-mappings and dim-coords. """ +import iris.tests as tests # isort: skip + import iris.coord_systems as ics import iris.fileformats._nc_load_rules.helpers as hh -import iris.tests as tests -from iris.tests.unit.fileformats.netcdf.load_cube.load_cube__activate import ( +from iris.tests.unit.fileformats.nc_load_rules.actions import ( Mixin__nc_load_actions, ) @@ -328,8 +329,15 @@ def check_result( self.assertEqual(yco_cs, cube_cs) -class Mixin__grid_mapping__tests(Mixin__grid_mapping): +class Test__grid_mapping(Mixin__grid_mapping, tests.IrisTest): # Various testcases for translation of grid-mappings + @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() def test_basic_latlon(self): # A basic reference example with a lat-long grid. @@ -746,32 +754,8 @@ def test_mapping__mismatch__nonll_coords_missing_system(self): ) -class Test__grid_mapping__pyke_rules( - Mixin__grid_mapping__tests, tests.IrisTest -): - # Run grid-mapping tests with Pyke (rules) - use_pyke = True - debug = False - - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - - -from unittest import skip - - -@skip -class Test__grid_mapping__nonpyke_actions( - Mixin__grid_mapping__tests, tests.IrisTest -): - # Run grid-mapping tests with non-Pyke (actions) - use_pyke = False - +class Test__aux_latlons(Mixin__grid_mapping, tests.IrisTest): + # Testcases for translating auxiliary latitude+longitude variables @classmethod def setUpClass(cls): super().setUpClass() @@ -780,10 +764,6 @@ def setUpClass(cls): def tearDownClass(cls): super().tearDownClass() - -class Mixin__aux_latlons(Mixin__grid_mapping): - # Testcases for translating auxiliary latitude+longitude variables - def test_aux_lon(self): # Change the name of xdim, and put xco on the coords list. # @@ -849,11 +829,7 @@ def test_aux_lat_rotated(self): self.check_result(result, yco_is_aux=True, yco_no_cs=True) -class Test__aux_latlons__pyke_rules(Mixin__aux_latlons, tests.IrisTest): - # Run aux-latlons tests with Pyke (rules) - use_pyke = True - debug = False - +class Test__nondimcoords(Mixin__grid_mapping, tests.IrisTest): @classmethod def setUpClass(cls): super().setUpClass() @@ -862,6 +838,23 @@ def setUpClass(cls): def tearDownClass(cls): super().tearDownClass() + def test_nondim_lats(self): + # Check what happens when values don't allow a coord to be dim-coord. 
+ # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_latitude_longitude + # 003 : fc_provides_coordinate_latitude + # 004 : fc_provides_coordinate_longitude + # 005 : fc_build_coordinate_latitude + # 006 : fc_build_coordinate_longitude + # NOTES: + # in terms of rule triggers, this is not distinct from a normal case + # - but the latitude is now an aux-coord. + warning = "must be.* monotonic" + result = self.run_testcase(warning=warning, yco_values=[0.0, 0.0]) + self.check_result(result, yco_is_aux=True) + if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__hybrid_formulae.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py similarity index 96% rename from lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__hybrid_formulae.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py index 60e3253707..c4a3b37c01 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__hybrid_formulae.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py @@ -10,14 +10,23 @@ Test rules activation relating to hybrid vertical coordinates. """ +import iris.tests as tests # isort: skip + import iris.fileformats._nc_load_rules.helpers as hh -import iris.tests as tests -from iris.tests.unit.fileformats.netcdf.load_cube.load_cube__activate import ( +from iris.tests.unit.fileformats.nc_load_rules.actions import ( Mixin__nc_load_actions, ) -class Mixin__formulae_tests(Mixin__nc_load_actions): +class Test__formulae_tests(Mixin__nc_load_actions, tests.IrisTest): + @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + def _make_testcase_cdl( self, formula_root_name=None, term_names=None, extra_formula_type=None ): @@ -265,22 +274,9 @@ def inner(self): # variable, i.e. the loop variable, which does not work ! 
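A minimal sketch of the late-binding pitfall noted in that comment, and the usual default-argument fix that construct_inner_func works around (the names here are invented for illustration):

    funcs_broken = [lambda: name for name in ("a", "b")]
    funcs_fixed = [lambda name=name: name for name in ("a", "b")]
    # Every plain closure sees the loop variable's final value.
    print([f() for f in funcs_broken])   # ['b', 'b']
    # Binding via a default argument captures each value at definition time.
    print([f() for f in funcs_fixed])    # ['a', 'b']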
method_name = f"test_{hybrid_type}_coord" setattr( - Mixin__formulae_tests, method_name, construct_inner_func(hybrid_type) + Test__formulae_tests, method_name, construct_inner_func(hybrid_type) ) -class Test__formulae__withpyke(Mixin__formulae_tests, tests.IrisTest): - use_pyke = True - debug = False - - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - - if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__miscellaneous.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py similarity index 92% rename from lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__miscellaneous.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py index a35a469e94..4ed90fd79a 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__miscellaneous.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__miscellaneous.py @@ -14,16 +14,25 @@ * ancillary variables """ +import iris.tests as tests # isort: skip + from iris.coords import AncillaryVariable, AuxCoord, CellMeasure from iris.fileformats.pp import STASH -import iris.tests as tests -from iris.tests.unit.fileformats.netcdf.load_cube.load_cube__activate import ( +from iris.tests.unit.fileformats.nc_load_rules.actions import ( Mixin__nc_load_actions, ) -class Mixin__ukmo_attributes(Mixin__nc_load_actions): +class Test__ukmo_attributes(Mixin__nc_load_actions, tests.IrisTest): # Tests for handling of the special UM-specific data-var attributes. + @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + def _make_testcase_cdl(self, **add_attrs): phenom_attrs_string = "" for key, value in add_attrs.items(): @@ -99,13 +108,17 @@ def test_processflags_empty(self): self.check_result(cube, processflags=expected_result) -class Test__ukmo_attributes__withpyke(Mixin__ukmo_attributes, tests.IrisTest): - use_pyke = True - - -class Mixin__labels_cellmeasures_ancils(Mixin__nc_load_actions): +class Test__labels_cellmeasures_ancils(Mixin__nc_load_actions, tests.IrisTest): # Tests for some simple rules that translate facts directly into cube data, # with no alternative actions, complications or failure modes to test. 
+ @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + def _make_testcase_cdl( self, include_label=False, @@ -204,11 +217,5 @@ def test_cellmeasure(self): self.check_result(cube, expect_cellmeasure=True) -class Test__labels_cellmeasures_ancils__withpyke( - Mixin__labels_cellmeasures_ancils, tests.IrisTest -): - use_pyke = True - - if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py similarity index 97% rename from lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py index c143d964ec..32f6dafc67 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py @@ -10,9 +10,10 @@ Tests for rules activation relating to 'time' and 'time_period' coords. """ +import iris.tests as tests # isort: skip + from iris.coords import AuxCoord, DimCoord -import iris.tests as tests -from iris.tests.unit.fileformats.netcdf.load_cube.load_cube__activate import ( +from iris.tests.unit.fileformats.nc_load_rules.actions import ( Mixin__nc_load_actions, ) @@ -391,11 +392,9 @@ def test_aux_no_coordsref(self): self.check_result(result, "aux") -class Test__time__withpyke(Mixin__singlecoord__tests, tests.IrisTest): +class Test__time(Mixin__singlecoord__tests, tests.IrisTest): # Run 'time' coord tests which = "time" - use_pyke = True - debug = False @classmethod def setUpClass(cls): @@ -406,11 +405,9 @@ def tearDownClass(cls): super().tearDownClass() -class Test__period__withpyke(Mixin__singlecoord__tests, tests.IrisTest): +class Test__period(Mixin__singlecoord__tests, tests.IrisTest): # Run 'time_period' coord tests which = "period" - use_pyke = True - debug = False @classmethod def setUpClass(cls): @@ -421,10 +418,18 @@ def tearDownClass(cls): super().tearDownClass() -class Mixin__dualcoord__tests(Mixin__timecoords__common): +class Test__dualcoord(Mixin__timecoords__common, tests.IrisTest): # Coordinate tests for a combination of 'time' and 'time_period'. # Not strictly necessary, as handling is independent, but a handy check # on typical usage. + @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + def test_time_and_period(self): # Test case with both 'time' and 'period', with separate dims. 
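For orientation, a hedged sketch of the kind of 'time' coordinate these testcases expect the rules to build; the values are invented:

    import cf_units
    from iris.coords import DimCoord

    time_unit = cf_units.Unit("days since 1970-01-01", calendar="standard")
    time_coord = DimCoord([0.0, 1.0, 2.0], standard_name="time", units=time_unit)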
# Rules Triggered: @@ -456,18 +461,5 @@ def test_time_dim_period_aux(self): self.check_result(result, time_is="dim", period_is="aux") -class Test__dualcoord_tests__withpyke(Mixin__dualcoord__tests, tests.IrisTest): - use_pyke = True - debug = False - - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - - if __name__ == "__main__": tests.main() diff --git a/lib/iris/fileformats/_pyke_rules/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/engine/__init__.py similarity index 68% rename from lib/iris/fileformats/_pyke_rules/__init__.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/engine/__init__.py index ac5753e58b..e6508bea85 100644 --- a/lib/iris/fileformats/_pyke_rules/__init__.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/engine/__init__.py @@ -3,3 +3,8 @@ # This file is part of Iris and is released under the LGPL license. # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. +""" +Unit tests for the module +:mod:`iris.fileformats.netcdf._nc_load_rules.engine` . + +""" diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test_engine.py b/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py similarity index 90% rename from lib/iris/tests/unit/fileformats/netcdf/load_cube/test_engine.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py index efc1f7e287..df5fbd4922 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test_engine.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/engine/test_engine.py @@ -6,8 +6,6 @@ """ Unit tests for the :mod:`iris.fileformats._nc_load_rules.engine` module. -TODO: relocate under iris/tests/unit/fileformats/nc_load_rules - """ from unittest import mock @@ -49,7 +47,7 @@ def test_activate(self): def test_add_case_specific_fact__newname(self): # Adding a new fact to a new fact-name records as expected. engine = self.nonempty_engine - engine.add_case_specific_fact("junkname", "new_fact", ("a1", "a2")) + engine.add_case_specific_fact("new_fact", ("a1", "a2")) self.assertEqual(engine.fact_list("new_fact"), [("a1", "a2")]) def test_add_case_specific_fact__existingname(self): @@ -57,7 +55,7 @@ def test_add_case_specific_fact__existingname(self): engine = self.nonempty_engine name = "this" self.assertEqual(engine.fact_list(name), [("that", "other")]) - engine.add_case_specific_fact("junkname", name, ("yetanother",)) + engine.add_case_specific_fact(name, ("yetanother",)) self.assertEqual( engine.fact_list(name), [("that", "other"), ("yetanother",)] ) @@ -65,7 +63,7 @@ def test_add_case_specific_fact__existingname(self): def test_add_case_specific_fact__emptyargs(self): # Check that empty args work ok, and will create a new fact. 
engine = self.empty_engine - engine.add_case_specific_fact("junkname", "new_fact", ()) + engine.add_case_specific_fact("new_fact", ()) self.assertIn("new_fact", engine.facts.entity_lists) self.assertEqual(engine.fact_list("new_fact"), [()]) @@ -82,7 +80,7 @@ def test_add_fact(self): self.assertEqual(acsf_call.call_count, 1) self.assertEqual( acsf_call.call_args_list, - [mock.call(kb_name="", fact_name="extra", fact_arglist=())], + [mock.call(fact_name="extra", fact_arglist=())], ) def test_get_kb(self): diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/__init__.py similarity index 67% rename from lib/iris/tests/unit/fileformats/pyke_rules/__init__.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/__init__.py index 71d129e4a7..69a536b9ae 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/__init__.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/__init__.py @@ -3,4 +3,8 @@ # This file is part of Iris and is released under the LGPL license. # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. -"""Unit tests for the :mod:`iris.fileformats._pyke_rules` module.""" +""" +Unit tests for the module +:mod:`iris.fileformats.netcdf._nc_load_rules.helpers` . + +""" diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_albers_equal_area_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py similarity index 79% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_albers_equal_area_coordinate_system.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py index 62e6d2e6b2..c040d43ca0 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_albers_equal_area_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_albers_equal_area_coordinate_system.py @@ -4,8 +4,8 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.build_albers_equal_area_coordinate_system`. +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +build_albers_equal_area_coordinate_system`. """ @@ -17,8 +17,9 @@ import iris from iris.coord_systems import AlbersEqualArea -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ - build_albers_equal_area_coordinate_system +from iris.fileformats._nc_load_rules.helpers import ( + build_albers_equal_area_coordinate_system, +) class TestBuildAlbersEqualAreaCoordinateSystem(tests.IrisTest): @@ -44,19 +45,21 @@ def _test(self, inverse_flattening=False, no_optionals=False): longitude_of_central_meridian=test_lon, false_easting=test_easting, false_northing=test_northing, - standard_parallel=test_parallels) + standard_parallel=test_parallels, + ) # Add ellipsoid args. 
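A note on the mock.Mock(spec=[], ...) pattern used for fake CF variables in these helper tests (cf_grid_var is built this way just below): with an empty spec, any attribute the test did not set raises AttributeError instead of auto-creating a child mock, so the helpers' getattr(var, attr, None) defaults behave as they would for a real variable missing that attribute. A tiny illustration:

    from unittest import mock

    fake_var = mock.Mock(spec=[], semi_major_axis=6377563.396)
    print(getattr(fake_var, "semi_major_axis", None))      # 6377563.396
    print(getattr(fake_var, "inverse_flattening", None))   # None, not an auto-Mock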
- gridvar_props['semi_major_axis'] = 6377563.396 + gridvar_props["semi_major_axis"] = 6377563.396 if inverse_flattening: - gridvar_props['inverse_flattening'] = 299.3249646 + gridvar_props["inverse_flattening"] = 299.3249646 expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, - inverse_flattening=299.3249646) + 6377563.396, inverse_flattening=299.3249646 + ) else: - gridvar_props['semi_minor_axis'] = 6356256.909 + gridvar_props["semi_minor_axis"] = 6356256.909 expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, 6356256.909) + 6377563.396, 6356256.909 + ) cf_grid_var = mock.Mock(spec=[], **gridvar_props) @@ -68,7 +71,8 @@ def _test(self, inverse_flattening=False, no_optionals=False): false_easting=test_easting, false_northing=test_northing, standard_parallels=test_parallels, - ellipsoid=expected_ellipsoid) + ellipsoid=expected_ellipsoid, + ) self.assertEqual(cs, expected) diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_auxiliary_coordinate.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py similarity index 68% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_auxiliary_coordinate.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py index bfe55a79a7..95f892454b 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_auxiliary_coordinate.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_auxiliary_coordinate.py @@ -4,8 +4,8 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.build_auxilliary_coordinate`. +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +build_auxilliary_coordinate`. """ @@ -18,52 +18,55 @@ import numpy as np from iris.coords import AuxCoord +from iris.fileformats._nc_load_rules.helpers import build_auxiliary_coordinate from iris.fileformats.cf import CFVariable -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ - build_auxiliary_coordinate -# from iris.tests.unit.fileformats.pyke_rules.compiled_krb\ -# .fc_rules_cf_fc.test_build_dimension_coordinate import RulesTestMixin - class TestBoundsVertexDim(tests.IrisTest): # Lookup for various tests (which change the dimension order). dim_names_lens = { - 'foo': 2, 'bar': 3, 'nv': 4, + "foo": 2, + "bar": 3, + "nv": 4, # 'x' and 'y' used as aliases for 'foo' and 'bar' - 'x': 2, 'y': 3} + "x": 2, + "y": 3, + } def setUp(self): # Create coordinate cf variables and pyke engine. 
- dimension_names = ('foo', 'bar') + dimension_names = ("foo", "bar") points, cf_data = self._make_array_and_cf_data(dimension_names) self.cf_coord_var = mock.Mock( spec=CFVariable, dimensions=dimension_names, - cf_name='wibble', + cf_name="wibble", cf_data=cf_data, standard_name=None, - long_name='wibble', - units='m', + long_name="wibble", + units="m", shape=points.shape, dtype=points.dtype, - __getitem__=lambda self, key: points[key]) + __getitem__=lambda self, key: points[key], + ) expected_bounds, _ = self._make_array_and_cf_data( - dimension_names=('foo', 'bar', 'nv')) + dimension_names=("foo", "bar", "nv") + ) self.expected_coord = AuxCoord( self.cf_coord_var[:], long_name=self.cf_coord_var.long_name, var_name=self.cf_coord_var.cf_name, units=self.cf_coord_var.units, - bounds=expected_bounds) + bounds=expected_bounds, + ) self.engine = mock.Mock( cube=mock.Mock(), - cf_var=mock.Mock(dimensions=('foo', 'bar'), - cf_data=cf_data), - filename='DUMMY', - cube_parts=dict(coordinates=[])) + cf_var=mock.Mock(dimensions=("foo", "bar"), cf_data=cf_data), + filename="DUMMY", + cube_parts=dict(coordinates=[]), + ) # Patch the deferred loading that prevents attempted file access. # This assumes that self.cf_bounds_var is defined in the test case. @@ -73,8 +76,10 @@ def patched__getitem__(proxy_self, keys): return var[keys] raise RuntimeError() - self.patch('iris.fileformats.netcdf.NetCDFDataProxy.__getitem__', - new=patched__getitem__) + self.patch( + "iris.fileformats.netcdf.NetCDFDataProxy.__getitem__", + new=patched__getitem__, + ) # Patch the helper function that retrieves the bounds cf variable, # and a False flag for climatological. @@ -83,14 +88,14 @@ def _get_per_test_bounds_var(_coord_unused): # Return the 'cf_bounds_var' created by the current test. return (self.cf_bounds_var, False) - self.patch('iris.fileformats._pyke_rules.compiled_krb.' - 'fc_rules_cf_fc.get_cf_bounds_var', - new=_get_per_test_bounds_var) + self.patch( + "iris.fileformats._nc_load_rules.helpers.get_cf_bounds_var", + new=_get_per_test_bounds_var, + ) @classmethod def _make_array_and_cf_data(cls, dimension_names): - shape = tuple(cls.dim_names_lens[name] - for name in dimension_names) + shape = tuple(cls.dim_names_lens[name] for name in dimension_names) cf_data = mock.MagicMock(_FillValue=None, spec=[]) cf_data.chunking = mock.MagicMock(return_value=shape) return np.zeros(shape), cf_data @@ -101,43 +106,45 @@ def _make_cf_bounds_var(self, dimension_names): cf_bounds_var = mock.Mock( spec=CFVariable, dimensions=dimension_names, - cf_name='wibble_bnds', + cf_name="wibble_bnds", cf_data=cf_data, shape=bounds.shape, dtype=bounds.dtype, - __getitem__=lambda self, key: bounds[key]) + __getitem__=lambda self, key: bounds[key], + ) return bounds, cf_bounds_var def _check_case(self, dimension_names): bounds, self.cf_bounds_var = self._make_cf_bounds_var( - dimension_names=dimension_names) + dimension_names=dimension_names + ) # Asserts must lie within context manager because of deferred loading. build_auxiliary_coordinate(self.engine, self.cf_coord_var) # Test that expected coord is built and added to cube. self.engine.cube.add_aux_coord.assert_called_with( - self.expected_coord, [0, 1]) + self.expected_coord, [0, 1] + ) # Test that engine.cube_parts container is correctly populated. 
expected_list = [(self.expected_coord, self.cf_coord_var.cf_name)] - self.assertEqual(self.engine.cube_parts['coordinates'], - expected_list) + self.assertEqual(self.engine.cube_parts["coordinates"], expected_list) def test_fastest_varying_vertex_dim(self): # The usual order. - self._check_case(dimension_names=('foo', 'bar', 'nv')) + self._check_case(dimension_names=("foo", "bar", "nv")) def test_slowest_varying_vertex_dim(self): # Bounds in the first (slowest varying) dimension. - self._check_case(dimension_names=('nv', 'foo', 'bar')) + self._check_case(dimension_names=("nv", "foo", "bar")) def test_fastest_with_different_dim_names(self): # Despite the dimension names ('x', and 'y') differing from the coord's # which are 'foo' and 'bar' (as permitted by the cf spec), # this should still work because the vertex dim is the fastest varying. - self._check_case(dimension_names=('x', 'y', 'nv')) + self._check_case(dimension_names=("x", "y", "nv")) class TestDtype(tests.IrisTest): @@ -149,21 +156,23 @@ def setUp(self): self.cf_coord_var = mock.Mock( spec=CFVariable, - dimensions=('foo', 'bar'), - cf_name='wibble', + dimensions=("foo", "bar"), + cf_name="wibble", cf_data=cf_data, standard_name=None, - long_name='wibble', - units='m', + long_name="wibble", + units="m", shape=points.shape, dtype=points.dtype, - __getitem__=lambda self, key: points[key]) + __getitem__=lambda self, key: points[key], + ) self.engine = mock.Mock( cube=mock.Mock(), - cf_var=mock.Mock(dimensions=('foo', 'bar')), - filename='DUMMY', - cube_parts=dict(coordinates=[])) + cf_var=mock.Mock(dimensions=("foo", "bar")), + filename="DUMMY", + cube_parts=dict(coordinates=[]), + ) def patched__getitem__(proxy_self, keys): if proxy_self.variable_name == self.cf_coord_var.cf_name: @@ -171,8 +180,9 @@ def patched__getitem__(proxy_self, keys): raise RuntimeError() self.deferred_load_patch = mock.patch( - 'iris.fileformats.netcdf.NetCDFDataProxy.__getitem__', - new=patched__getitem__) + "iris.fileformats.netcdf.NetCDFDataProxy.__getitem__", + new=patched__getitem__, + ) def test_scale_factor_add_offset_int(self): self.cf_coord_var.scale_factor = 3 @@ -181,26 +191,26 @@ def test_scale_factor_add_offset_int(self): with self.deferred_load_patch: build_auxiliary_coordinate(self.engine, self.cf_coord_var) - coord, _ = self.engine.cube_parts['coordinates'][0] - self.assertEqual(coord.dtype.kind, 'i') + coord, _ = self.engine.cube_parts["coordinates"][0] + self.assertEqual(coord.dtype.kind, "i") def test_scale_factor_float(self): - self.cf_coord_var.scale_factor = 3. + self.cf_coord_var.scale_factor = 3.0 with self.deferred_load_patch: build_auxiliary_coordinate(self.engine, self.cf_coord_var) - coord, _ = self.engine.cube_parts['coordinates'][0] - self.assertEqual(coord.dtype.kind, 'f') + coord, _ = self.engine.cube_parts["coordinates"][0] + self.assertEqual(coord.dtype.kind, "f") def test_add_offset_float(self): - self.cf_coord_var.add_offset = 5. + self.cf_coord_var.add_offset = 5.0 with self.deferred_load_patch: build_auxiliary_coordinate(self.engine, self.cf_coord_var) - coord, _ = self.engine.cube_parts['coordinates'][0] - self.assertEqual(coord.dtype.kind, 'f') + coord, _ = self.engine.cube_parts["coordinates"][0] + self.assertEqual(coord.dtype.kind, "f") class TestCoordConstruction(tests.IrisTest): @@ -208,35 +218,40 @@ def setUp(self): # Create dummy pyke engine. 
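A numpy-only illustration of the dtype behaviour the scale_factor / add_offset tests above exercise: all-integer packing parameters keep integer data, while a float in either parameter promotes the unpacked values to floating point.

    import numpy as np

    raw = np.arange(6, dtype=np.int16)
    print((raw * 3 + 4).dtype.kind)     # 'i' - integer scale and offset stay integer
    print((raw * 3.0 + 4).dtype.kind)   # 'f' - a float scale_factor promotes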
self.engine = mock.Mock( cube=mock.Mock(), - cf_var=mock.Mock(dimensions=('foo', 'bar')), - filename='DUMMY', - cube_parts=dict(coordinates=[])) + cf_var=mock.Mock(dimensions=("foo", "bar")), + filename="DUMMY", + cube_parts=dict(coordinates=[]), + ) points = np.arange(6) self.cf_coord_var = mock.Mock( - dimensions=('foo',), + dimensions=("foo",), scale_factor=1, add_offset=0, - cf_name='wibble', - cf_data=mock.MagicMock(chunking=mock.Mock(return_value=None), spec=[]), + cf_name="wibble", + cf_data=mock.MagicMock( + chunking=mock.Mock(return_value=None), spec=[] + ), standard_name=None, - long_name='wibble', - units='days since 1970-01-01', + long_name="wibble", + units="days since 1970-01-01", calendar=None, shape=points.shape, dtype=points.dtype, - __getitem__=lambda self, key: points[key]) + __getitem__=lambda self, key: points[key], + ) bounds = np.arange(12).reshape(6, 2) self.cf_bounds_var = mock.Mock( - dimensions=('x', 'nv'), + dimensions=("x", "nv"), scale_factor=1, add_offset=0, - cf_name='wibble_bnds', + cf_name="wibble_bnds", cf_data=mock.MagicMock(chunking=mock.Mock(return_value=None)), shape=bounds.shape, dtype=bounds.dtype, - __getitem__=lambda self, key: bounds[key]) + __getitem__=lambda self, key: bounds[key], + ) self.bounds = bounds # Create patch for deferred loading that prevents attempted @@ -248,8 +263,10 @@ def patched__getitem__(proxy_self, keys): return var[keys] raise RuntimeError() - self.patch('iris.fileformats.netcdf.NetCDFDataProxy.__getitem__', - new=patched__getitem__) + self.patch( + "iris.fileformats.netcdf.NetCDFDataProxy.__getitem__", + new=patched__getitem__, + ) # Patch the helper function that retrieves the bounds cf variable. # This avoids the need for setting up further mocking of cf objects. @@ -258,9 +275,10 @@ def patched__getitem__(proxy_self, keys): def get_cf_bounds_var(coord_var): return self.cf_bounds_var, self.use_climatology_bounds - self.patch('iris.fileformats._pyke_rules.compiled_krb.' - 'fc_rules_cf_fc.get_cf_bounds_var', - new=get_cf_bounds_var) + self.patch( + "iris.fileformats._nc_load_rules.helpers.get_cf_bounds_var", + new=get_cf_bounds_var, + ) def check_case_aux_coord_construction(self, climatology=False): # Test a generic auxiliary coordinate, with or without @@ -273,13 +291,13 @@ def check_case_aux_coord_construction(self, climatology=False): var_name=self.cf_coord_var.cf_name, units=self.cf_coord_var.units, bounds=self.bounds, - climatological=climatology) + climatological=climatology, + ) build_auxiliary_coordinate(self.engine, self.cf_coord_var) # Test that expected coord is built and added to cube. 
- self.engine.cube.add_aux_coord.assert_called_with( - expected_coord, [0]) + self.engine.cube.add_aux_coord.assert_called_with(expected_coord, [0]) def test_aux_coord_construction(self): self.check_case_aux_coord_construction(climatology=False) @@ -288,5 +306,5 @@ def test_aux_coord_construction__climatology(self): self.check_case_aux_coord_construction(climatology=True) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_cube_metadata.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cube_metadata.py similarity index 67% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_cube_metadata.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cube_metadata.py index de3354901b..a13fa6cca0 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_cube_metadata.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_cube_metadata.py @@ -4,8 +4,8 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.build_cube_metadata`. +Test function :func:`iris.fileformats._nc_load_rules.helpers\ +build_cube_metadata`. """ @@ -18,8 +18,7 @@ import numpy as np from iris.cube import Cube -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ - build_cube_metadata +from iris.fileformats._nc_load_rules.helpers import build_cube_metadata def _make_engine(global_attributes=None, standard_name=None, long_name=None): @@ -29,44 +28,48 @@ def _make_engine(global_attributes=None, standard_name=None, long_name=None): cf_group = mock.Mock(global_attributes=global_attributes) cf_var = mock.MagicMock( - cf_name='wibble', + cf_name="wibble", standard_name=standard_name, long_name=long_name, - units='m', + units="m", dtype=np.float64, cell_methods=None, - cf_group=cf_group) + cf_group=cf_group, + ) - engine = mock.Mock( - cube=Cube([23]), - cf_var=cf_var) + engine = mock.Mock(cube=Cube([23]), cf_var=cf_var) return engine class TestInvalidGlobalAttributes(tests.IrisTest): def test_valid(self): - global_attributes = {'Conventions': 'CF-1.5', - 'comment': 'Mocked test object'} + global_attributes = { + "Conventions": "CF-1.5", + "comment": "Mocked test object", + } engine = _make_engine(global_attributes) build_cube_metadata(engine) expected = global_attributes self.assertEqual(engine.cube.attributes, expected) def test_invalid(self): - global_attributes = {'Conventions': 'CF-1.5', - 'comment': 'Mocked test object', - 'calendar': 'standard'} + global_attributes = { + "Conventions": "CF-1.5", + "comment": "Mocked test object", + "calendar": "standard", + } engine = _make_engine(global_attributes) - with mock.patch('warnings.warn') as warn: + with mock.patch("warnings.warn") as warn: build_cube_metadata(engine) # Check for a warning. self.assertEqual(warn.call_count, 1) - self.assertIn("Skipping global attribute 'calendar'", - warn.call_args[0][0]) + self.assertIn( + "Skipping global attribute 'calendar'", warn.call_args[0][0] + ) # Check resulting attributes. The invalid entry 'calendar' # should be filtered out. 
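An illustrative sketch, not the helper's own code, of the attribute filtering this test asserts: global attribute names with a special CF meaning, such as 'calendar', are skipped with a warning rather than copied onto the cube.

    import warnings

    skipped_names = {"calendar"}   # illustrative subset of the disallowed names
    global_attributes = {"Conventions": "CF-1.5", "calendar": "standard"}
    cube_attributes = {}
    for name, value in global_attributes.items():
        if name in skipped_names:
            warnings.warn(f"Skipping global attribute {name!r}")
        else:
            cube_attributes[name] = value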
- global_attributes.pop('calendar') + global_attributes.pop("calendar") expected = global_attributes self.assertEqual(engine.cube.attributes, expected) @@ -91,28 +94,31 @@ def test_standard_name_none_long_name_none(self): self.check_cube_names(inputs, expected) def test_standard_name_none_long_name_set(self): - inputs = (None, 'ice_thickness_long_name') - expected = (None, 'ice_thickness_long_name') + inputs = (None, "ice_thickness_long_name") + expected = (None, "ice_thickness_long_name") self.check_cube_names(inputs, expected) def test_standard_name_valid_long_name_none(self): - inputs = ('sea_ice_thickness', None) - expected = ('sea_ice_thickness', None) + inputs = ("sea_ice_thickness", None) + expected = ("sea_ice_thickness", None) self.check_cube_names(inputs, expected) def test_standard_name_valid_long_name_set(self): - inputs = ('sea_ice_thickness', 'ice_thickness_long_name') - expected = ('sea_ice_thickness', 'ice_thickness_long_name') + inputs = ("sea_ice_thickness", "ice_thickness_long_name") + expected = ("sea_ice_thickness", "ice_thickness_long_name") self.check_cube_names(inputs, expected) def test_standard_name_invalid_long_name_none(self): - inputs = ('not_a_standard_name', None) - expected = (None, 'not_a_standard_name',) + inputs = ("not_a_standard_name", None) + expected = ( + None, + "not_a_standard_name", + ) self.check_cube_names(inputs, expected) def test_standard_name_invalid_long_name_set(self): - inputs = ('not_a_standard_name', 'ice_thickness_long_name') - expected = (None, 'ice_thickness_long_name') + inputs = ("not_a_standard_name", "ice_thickness_long_name") + expected = (None, "ice_thickness_long_name") self.check_cube_names(inputs, expected) diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_dimension_coordinate.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py similarity index 72% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_dimension_coordinate.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py index 50d81b2c1f..a75678d923 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_dimension_coordinate.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_dimension_coordinate.py @@ -4,8 +4,8 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.build_dimension_coordinate`. +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +build_dimension_coordinate`. """ @@ -19,8 +19,7 @@ import numpy as np from iris.coords import AuxCoord, DimCoord -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ - build_dimension_coordinate +from iris.fileformats._nc_load_rules.helpers import build_dimension_coordinate class RulesTestMixin: @@ -28,9 +27,10 @@ def setUp(self): # Create dummy pyke engine. self.engine = mock.Mock( cube=mock.Mock(), - cf_var=mock.Mock(dimensions=('foo', 'bar')), - filename='DUMMY', - cube_parts=dict(coordinates=[])) + cf_var=mock.Mock(dimensions=("foo", "bar")), + filename="DUMMY", + cube_parts=dict(coordinates=[]), + ) # Create patch for deferred loading that prevents attempted # file access. 
This assumes that self.cf_coord_var and @@ -42,8 +42,9 @@ def patched__getitem__(proxy_self, keys): raise RuntimeError() self.deferred_load_patch = mock.patch( - 'iris.fileformats.netcdf.NetCDFDataProxy.__getitem__', - new=patched__getitem__) + "iris.fileformats.netcdf.NetCDFDataProxy.__getitem__", + new=patched__getitem__, + ) # Patch the helper function that retrieves the bounds cf variable. # This avoids the need for setting up further mocking of cf objects. @@ -53,9 +54,9 @@ def get_cf_bounds_var(coord_var): return self.cf_bounds_var, self.use_climatology_bounds self.get_cf_bounds_var_patch = mock.patch( - 'iris.fileformats._pyke_rules.compiled_krb.' - 'fc_rules_cf_fc.get_cf_bounds_var', - new=get_cf_bounds_var) + "iris.fileformats._nc_load_rules.helpers.get_cf_bounds_var", + new=get_cf_bounds_var, + ) class TestCoordConstruction(tests.IrisTest, RulesTestMixin): @@ -65,24 +66,26 @@ def setUp(self): bounds = np.arange(12).reshape(6, 2) self.cf_bounds_var = mock.Mock( - dimensions=('x', 'nv'), - cf_name='wibble_bnds', + dimensions=("x", "nv"), + cf_name="wibble_bnds", shape=bounds.shape, - __getitem__=lambda self, key: bounds[key]) + __getitem__=lambda self, key: bounds[key], + ) self.bounds = bounds def _set_cf_coord_var(self, points): self.cf_coord_var = mock.Mock( - dimensions=('foo',), - cf_name='wibble', + dimensions=("foo",), + cf_name="wibble", cf_data=mock.Mock(spec=[]), standard_name=None, - long_name='wibble', - units='days since 1970-01-01', + long_name="wibble", + units="days since 1970-01-01", calendar=None, shape=points.shape, dtype=points.dtype, - __getitem__=lambda self, key: points[key]) + __getitem__=lambda self, key: points[key], + ) def check_case_dim_coord_construction(self, climatology=False): # Test a generic dimension coordinate, with or without @@ -96,7 +99,8 @@ def check_case_dim_coord_construction(self, climatology=False): var_name=self.cf_coord_var.cf_name, units=self.cf_coord_var.units, bounds=self.bounds, - climatological=climatology) + climatological=climatology, + ) # Asserts must lie within context manager because of deferred loading. with self.deferred_load_patch, self.get_cf_bounds_var_patch: @@ -104,7 +108,8 @@ def check_case_dim_coord_construction(self, climatology=False): # Test that expected coord is built and added to cube. self.engine.cube.add_dim_coord.assert_called_with( - expected_coord, [0]) + expected_coord, [0] + ) def test_dim_coord_construction(self): self.check_case_dim_coord_construction(climatology=False) @@ -113,18 +118,21 @@ def test_dim_coord_construction__climatology(self): self.check_case_dim_coord_construction(climatology=True) def test_dim_coord_construction_masked_array(self): - self._set_cf_coord_var(np.ma.array( - np.arange(6), - mask=[True, False, False, False, False, False], - fill_value=-999, - )) + self._set_cf_coord_var( + np.ma.array( + np.arange(6), + mask=[True, False, False, False, False, False], + fill_value=-999, + ) + ) expected_coord = DimCoord( np.array([-999, 1, 2, 3, 4, 5]), long_name=self.cf_coord_var.long_name, var_name=self.cf_coord_var.cf_name, units=self.cf_coord_var.units, - bounds=self.bounds) + bounds=self.bounds, + ) with warnings.catch_warnings(record=True) as w: # Asserts must lie within context manager because of deferred @@ -134,24 +142,28 @@ def test_dim_coord_construction_masked_array(self): # Test that expected coord is built and added to cube. 
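A small numpy sketch of the masked-points handling this testcase expects: masked points are replaced by the array's fill value, which is why the expected coordinate above starts with -999 and the "Gracefully filling" warning is asserted just below.

    import numpy as np

    points = np.ma.array(
        np.arange(6),
        mask=[True, False, False, False, False, False],
        fill_value=-999,
    )
    print(points.filled())   # [-999    1    2    3    4    5]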
self.engine.cube.add_dim_coord.assert_called_with( - expected_coord, [0]) + expected_coord, [0] + ) # Assert warning is raised assert len(w) == 1 - assert 'Gracefully filling' in w[0].message.args[0] + assert "Gracefully filling" in w[0].message.args[0] def test_dim_coord_construction_masked_array_mask_does_nothing(self): - self._set_cf_coord_var(np.ma.array( - np.arange(6), - mask=False, - )) + self._set_cf_coord_var( + np.ma.array( + np.arange(6), + mask=False, + ) + ) expected_coord = DimCoord( self.cf_coord_var[:], long_name=self.cf_coord_var.long_name, var_name=self.cf_coord_var.cf_name, units=self.cf_coord_var.units, - bounds=self.bounds) + bounds=self.bounds, + ) with warnings.catch_warnings(record=True) as w: # Asserts must lie within context manager because of deferred @@ -161,7 +173,8 @@ def test_dim_coord_construction_masked_array_mask_does_nothing(self): # Test that expected coord is built and added to cube. self.engine.cube.add_dim_coord.assert_called_with( - expected_coord, [0]) + expected_coord, [0] + ) # Assert no warning is raised assert len(w) == 0 @@ -175,7 +188,8 @@ def test_dim_coord_construction_masked_bounds_mask_does_nothing(self): long_name=self.cf_coord_var.long_name, var_name=self.cf_coord_var.cf_name, units=self.cf_coord_var.units, - bounds=self.bounds) + bounds=self.bounds, + ) with warnings.catch_warnings(record=True) as w: # Asserts must lie within context manager because of deferred @@ -185,7 +199,8 @@ def test_dim_coord_construction_masked_bounds_mask_does_nothing(self): # Test that expected coord is built and added to cube. self.engine.cube.add_dim_coord.assert_called_with( - expected_coord, [0]) + expected_coord, [0] + ) # Assert no warning is raised assert len(w) == 0 @@ -200,20 +215,23 @@ def test_aux_coord_construction(self): long_name=self.cf_coord_var.long_name, var_name=self.cf_coord_var.cf_name, units=self.cf_coord_var.units, - bounds=self.bounds) + bounds=self.bounds, + ) - warning_patch = mock.patch('warnings.warn') + warning_patch = mock.patch("warnings.warn") # Asserts must lie within context manager because of deferred loading. - with warning_patch, self.deferred_load_patch, \ - self.get_cf_bounds_var_patch: + with warning_patch, self.deferred_load_patch, self.get_cf_bounds_var_patch: build_dimension_coordinate(self.engine, self.cf_coord_var) # Test that expected coord is built and added to cube. self.engine.cube.add_aux_coord.assert_called_with( - expected_coord, [0]) - self.assertIn("creating 'wibble' auxiliary coordinate instead", - warnings.warn.call_args[0][0]) + expected_coord, [0] + ) + self.assertIn( + "creating 'wibble' auxiliary coordinate instead", + warnings.warn.call_args[0][0], + ) class TestBoundsVertexDim(tests.IrisTest, RulesTestMixin): @@ -223,24 +241,26 @@ def setUp(self): # Create test coordinate cf variable. points = np.arange(6) self.cf_coord_var = mock.Mock( - dimensions=('foo',), - cf_name='wibble', + dimensions=("foo",), + cf_name="wibble", standard_name=None, - long_name='wibble', + long_name="wibble", cf_data=mock.Mock(spec=[]), - units='m', + units="m", shape=points.shape, dtype=points.dtype, - __getitem__=lambda self, key: points[key]) + __getitem__=lambda self, key: points[key], + ) def test_slowest_varying_vertex_dim(self): # Create the bounds cf variable. 
bounds = np.arange(12).reshape(2, 6) self.cf_bounds_var = mock.Mock( - dimensions=('nv', 'foo'), - cf_name='wibble_bnds', + dimensions=("nv", "foo"), + cf_name="wibble_bnds", shape=bounds.shape, - __getitem__=lambda self, key: bounds[key]) + __getitem__=lambda self, key: bounds[key], + ) # Expected bounds on the resulting coordinate should be rolled so that # the vertex dimension is at the end. @@ -250,7 +270,8 @@ def test_slowest_varying_vertex_dim(self): long_name=self.cf_coord_var.long_name, var_name=self.cf_coord_var.cf_name, units=self.cf_coord_var.units, - bounds=expected_bounds) + bounds=expected_bounds, + ) # Asserts must lie within context manager because of deferred loading. with self.deferred_load_patch, self.get_cf_bounds_var_patch: @@ -258,27 +279,31 @@ def test_slowest_varying_vertex_dim(self): # Test that expected coord is built and added to cube. self.engine.cube.add_dim_coord.assert_called_with( - expected_coord, [0]) + expected_coord, [0] + ) # Test that engine.cube_parts container is correctly populated. expected_list = [(expected_coord, self.cf_coord_var.cf_name)] - self.assertEqual(self.engine.cube_parts['coordinates'], - expected_list) + self.assertEqual( + self.engine.cube_parts["coordinates"], expected_list + ) def test_fastest_varying_vertex_dim(self): bounds = np.arange(12).reshape(6, 2) self.cf_bounds_var = mock.Mock( - dimensions=('foo', 'nv'), - cf_name='wibble_bnds', + dimensions=("foo", "nv"), + cf_name="wibble_bnds", shape=bounds.shape, - __getitem__=lambda self, key: bounds[key]) + __getitem__=lambda self, key: bounds[key], + ) expected_coord = DimCoord( self.cf_coord_var[:], long_name=self.cf_coord_var.long_name, var_name=self.cf_coord_var.cf_name, units=self.cf_coord_var.units, - bounds=bounds) + bounds=bounds, + ) # Asserts must lie within context manager because of deferred loading. with self.deferred_load_patch, self.get_cf_bounds_var_patch: @@ -286,12 +311,14 @@ def test_fastest_varying_vertex_dim(self): # Test that expected coord is built and added to cube. self.engine.cube.add_dim_coord.assert_called_with( - expected_coord, [0]) + expected_coord, [0] + ) # Test that engine.cube_parts container is correctly populated. expected_list = [(expected_coord, self.cf_coord_var.cf_name)] - self.assertEqual(self.engine.cube_parts['coordinates'], - expected_list) + self.assertEqual( + self.engine.cube_parts["coordinates"], expected_list + ) def test_fastest_with_different_dim_names(self): # Despite the dimension names 'x' differing from the coord's @@ -299,17 +326,19 @@ def test_fastest_with_different_dim_names(self): # this should still work because the vertex dim is the fastest varying. bounds = np.arange(12).reshape(6, 2) self.cf_bounds_var = mock.Mock( - dimensions=('x', 'nv'), - cf_name='wibble_bnds', + dimensions=("x", "nv"), + cf_name="wibble_bnds", shape=bounds.shape, - __getitem__=lambda self, key: bounds[key]) + __getitem__=lambda self, key: bounds[key], + ) expected_coord = DimCoord( self.cf_coord_var[:], long_name=self.cf_coord_var.long_name, var_name=self.cf_coord_var.cf_name, units=self.cf_coord_var.units, - bounds=bounds) + bounds=bounds, + ) # Asserts must lie within context manager because of deferred loading. with self.deferred_load_patch, self.get_cf_bounds_var_patch: @@ -317,12 +346,14 @@ def test_fastest_with_different_dim_names(self): # Test that expected coord is built and added to cube. 
self.engine.cube.add_dim_coord.assert_called_with( - expected_coord, [0]) + expected_coord, [0] + ) # Test that engine.cube_parts container is correctly populated. expected_list = [(expected_coord, self.cf_coord_var.cf_name)] - self.assertEqual(self.engine.cube_parts['coordinates'], - expected_list) + self.assertEqual( + self.engine.cube_parts["coordinates"], expected_list + ) class TestCircular(tests.IrisTest, RulesTestMixin): @@ -332,36 +363,38 @@ def setUp(self): RulesTestMixin.setUp(self) self.cf_bounds_var = None - def _make_vars(self, points, bounds=None, units='degrees'): + def _make_vars(self, points, bounds=None, units="degrees"): points = np.array(points) self.cf_coord_var = mock.MagicMock( - dimensions=('foo',), - cf_name='wibble', + dimensions=("foo",), + cf_name="wibble", standard_name=None, - long_name='wibble', + long_name="wibble", cf_data=mock.Mock(spec=[]), units=units, shape=points.shape, dtype=points.dtype, - __getitem__=lambda self, key: points[key]) + __getitem__=lambda self, key: points[key], + ) if bounds: - bounds = np.array(bounds).reshape( - self.cf_coord_var.shape + (2,)) + bounds = np.array(bounds).reshape(self.cf_coord_var.shape + (2,)) self.cf_bounds_var = mock.Mock( - dimensions=('x', 'nv'), - cf_name='wibble_bnds', + dimensions=("x", "nv"), + cf_name="wibble_bnds", shape=bounds.shape, - __getitem__=lambda self, key: bounds[key]) + __getitem__=lambda self, key: bounds[key], + ) def _check_circular(self, circular, *args, **kwargs): - if 'coord_name' in kwargs: - coord_name = kwargs.pop('coord_name') + if "coord_name" in kwargs: + coord_name = kwargs.pop("coord_name") else: - coord_name = 'longitude' + coord_name = "longitude" self._make_vars(*args, **kwargs) with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_dimension_coordinate(self.engine, self.cf_coord_var, - coord_name=coord_name) + build_dimension_coordinate( + self.engine, self.cf_coord_var, coord_name=coord_name + ) self.assertEqual(self.engine.cube.add_dim_coord.call_count, 1) coord, dims = self.engine.cube.add_dim_coord.call_args[0] self.assertEqual(coord.circular, circular) @@ -395,11 +428,11 @@ def test_multiple_unbounded_circular(self): def test_non_angle_noncircular(self): points = [0.0, 90.0, 180.0, 270.0] - self.check_noncircular(points, units='m') + self.check_noncircular(points, units="m") def test_non_longitude_noncircular(self): points = [0.0, 90.0, 180.0, 270.0] - self.check_noncircular(points, coord_name='depth') + self.check_noncircular(points, coord_name="depth") def test_multiple_unbounded_irregular_noncircular(self): self.check_noncircular([0.0, 90.0, 189.999, 270.0]) @@ -411,16 +444,16 @@ def test_multiple_unbounded_shortrange_circular(self): self.check_circular([0.0, 90.0, 180.0, 269.9999]) def test_multiple_bounded_circular(self): - self.check_circular([0.0, 120.3, 240.0], - bounds=[[-45.0, 50.0], - [100.0, 175.0], - [200.0, 315.0]]) + self.check_circular( + [0.0, 120.3, 240.0], + bounds=[[-45.0, 50.0], [100.0, 175.0], [200.0, 315.0]], + ) def test_multiple_bounded_noncircular(self): - self.check_noncircular([0.0, 120.3, 240.0], - bounds=[[-45.0, 50.0], - [100.0, 175.0], - [200.0, 355.0]]) + self.check_noncircular( + [0.0, 120.3, 240.0], + bounds=[[-45.0, 50.0], [100.0, 175.0], [200.0, 355.0]], + ) class TestCircularScalar(tests.IrisTest, RulesTestMixin): @@ -431,57 +464,60 @@ def _make_vars(self, bounds): # Create cf vars for the coordinate and its bounds. # Note that for a scalar the shape of the array from # the cf var is (), rather than (1,). 
- points = np.array([0.]) + points = np.array([0.0]) self.cf_coord_var = mock.Mock( dimensions=(), - cf_name='wibble', + cf_name="wibble", standard_name=None, - long_name='wibble', - units='degrees', + long_name="wibble", + units="degrees", cf_data=mock.Mock(spec=[]), shape=(), dtype=points.dtype, - __getitem__=lambda self, key: points[key]) + __getitem__=lambda self, key: points[key], + ) bounds = np.array(bounds) self.cf_bounds_var = mock.Mock( - dimensions=(u'bnds'), - cf_name='wibble_bnds', + dimensions=("bnds"), + cf_name="wibble_bnds", shape=bounds.shape, - __getitem__=lambda self, key: bounds[key]) + __getitem__=lambda self, key: bounds[key], + ) def _assert_circular(self, value): with self.deferred_load_patch, self.get_cf_bounds_var_patch: - build_dimension_coordinate(self.engine, self.cf_coord_var, - coord_name='longitude') + build_dimension_coordinate( + self.engine, self.cf_coord_var, coord_name="longitude" + ) self.assertEqual(self.engine.cube.add_aux_coord.call_count, 1) coord, dims = self.engine.cube.add_aux_coord.call_args[0] self.assertEqual(coord.circular, value) def test_two_bounds_noncircular(self): - self._make_vars([0., 180.]) + self._make_vars([0.0, 180.0]) self._assert_circular(False) def test_two_bounds_circular(self): - self._make_vars([0., 360.]) + self._make_vars([0.0, 360.0]) self._assert_circular(True) def test_two_bounds_circular_decreasing(self): - self._make_vars([360., 0.]) + self._make_vars([360.0, 0.0]) self._assert_circular(True) def test_two_bounds_circular_alt(self): - self._make_vars([-180., 180.]) + self._make_vars([-180.0, 180.0]) self._assert_circular(True) def test_two_bounds_circular_alt_decreasing(self): - self._make_vars([180., -180.]) + self._make_vars([180.0, -180.0]) self._assert_circular(True) def test_four_bounds(self): - self._make_vars([0., 10., 20., 30.]) + self._make_vars([0.0, 10.0, 20.0, 30.0]) self._assert_circular(False) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_geostationary_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py similarity index 68% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_geostationary_coordinate_system.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py index b9a95bc094..28b3d8ab9a 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_geostationary_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_geostationary_coordinate_system.py @@ -4,8 +4,8 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.build_geostationary_coordinate_system`. +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +build_geostationary_coordinate_system`. 
""" @@ -17,12 +17,15 @@ import iris from iris.coord_systems import Geostationary -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ - build_geostationary_coordinate_system +from iris.fileformats._nc_load_rules.helpers import ( + build_geostationary_coordinate_system, +) class TestBuildGeostationaryCoordinateSystem(tests.IrisTest): - def _test(self, inverse_flattening=False, replace_props=None, remove_props=None): + def _test( + self, inverse_flattening=False, replace_props=None, remove_props=None + ): """ Generic test that can check vertical perspective validity with or without inverse flattening. @@ -30,12 +33,13 @@ def _test(self, inverse_flattening=False, replace_props=None, remove_props=None) # Make a dictionary of the non-ellipsoid properties to be added to both a test # coord-system, and a test grid-mapping cf_var. non_ellipsoid_kwargs = { - 'latitude_of_projection_origin': 0.0, - 'longitude_of_projection_origin': 2.0, - 'perspective_point_height': 2000000.0, - 'sweep_angle_axis': 'x', - 'false_easting': 100.0, - 'false_northing': 200.0} + "latitude_of_projection_origin": 0.0, + "longitude_of_projection_origin": 2.0, + "perspective_point_height": 2000000.0, + "sweep_angle_axis": "x", + "false_easting": 100.0, + "false_northing": 200.0, + } # Make specified adjustments to the non-ellipsoid properties. if remove_props: @@ -47,11 +51,11 @@ def _test(self, inverse_flattening=False, replace_props=None, remove_props=None) # Make a dictionary of ellipsoid properties, to be added to both a test # ellipsoid and the grid-mapping cf_var. - ellipsoid_kwargs = {'semi_major_axis': 6377563.396} + ellipsoid_kwargs = {"semi_major_axis": 6377563.396} if inverse_flattening: - ellipsoid_kwargs['inverse_flattening'] = 299.3249646 + ellipsoid_kwargs["inverse_flattening"] = 299.3249646 else: - ellipsoid_kwargs['semi_minor_axis'] = 6356256.909 + ellipsoid_kwargs["semi_minor_axis"] = 6356256.909 cf_grid_var_kwargs = non_ellipsoid_kwargs.copy() cf_grid_var_kwargs.update(ellipsoid_kwargs) @@ -68,10 +72,12 @@ def test_inverse_flattening(self): self._test(inverse_flattening=True) def test_false_offsets_missing(self): - self._test(remove_props=['false_easting', 'false_northing']) + self._test(remove_props=["false_easting", "false_northing"]) def test_false_offsets_none(self): - self._test(replace_props={'false_easting':None, 'false_northing':None}) + self._test( + replace_props={"false_easting": None, "false_northing": None} + ) if __name__ == "__main__": diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_lambert_azimuthal_equal_area_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py similarity index 76% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_lambert_azimuthal_equal_area_coordinate_system.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py index f5346cbc68..05185a4cf5 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_lambert_azimuthal_equal_area_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_azimuthal_equal_area_coordinate_system.py @@ -4,8 +4,8 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. 
""" -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.build_lambert_azimuthal_equal_area_coordinate_system`. +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +build_lambert_azimuthal_equal_area_coordinate_system`. """ @@ -17,8 +17,9 @@ import iris from iris.coord_systems import LambertAzimuthalEqualArea -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ - build_lambert_azimuthal_equal_area_coordinate_system +from iris.fileformats._nc_load_rules.helpers import ( + build_lambert_azimuthal_equal_area_coordinate_system, +) class TestBuildLambertAzimuthalEqualAreaCoordinateSystem(tests.IrisTest): @@ -41,31 +42,35 @@ def _test(self, inverse_flattening=False, no_optionals=False): latitude_of_projection_origin=test_lat, longitude_of_projection_origin=test_lon, false_easting=test_easting, - false_northing=test_northing) + false_northing=test_northing, + ) # Add ellipsoid args. - gridvar_props['semi_major_axis'] = 6377563.396 + gridvar_props["semi_major_axis"] = 6377563.396 if inverse_flattening: - gridvar_props['inverse_flattening'] = 299.3249646 + gridvar_props["inverse_flattening"] = 299.3249646 expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, - inverse_flattening=299.3249646) + 6377563.396, inverse_flattening=299.3249646 + ) else: - gridvar_props['semi_minor_axis'] = 6356256.909 + gridvar_props["semi_minor_axis"] = 6356256.909 expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, 6356256.909) + 6377563.396, 6356256.909 + ) cf_grid_var = mock.Mock(spec=[], **gridvar_props) cs = build_lambert_azimuthal_equal_area_coordinate_system( - None, cf_grid_var) + None, cf_grid_var + ) expected = LambertAzimuthalEqualArea( latitude_of_projection_origin=test_lat, longitude_of_projection_origin=test_lon, false_easting=test_easting, false_northing=test_northing, - ellipsoid=expected_ellipsoid) + ellipsoid=expected_ellipsoid, + ) self.assertEqual(cs, expected) diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_lambert_conformal_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py similarity index 78% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_lambert_conformal_coordinate_system.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py index 458b60d36f..22bb7149b1 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_lambert_conformal_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_lambert_conformal_coordinate_system.py @@ -4,8 +4,8 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.build_lambert_conformal_coordinate_system`. +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +build_lambert_conformal_coordinate_system`. 
""" @@ -17,8 +17,9 @@ import iris from iris.coord_systems import LambertConformal -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ - build_lambert_conformal_coordinate_system +from iris.fileformats._nc_load_rules.helpers import ( + build_lambert_conformal_coordinate_system, +) class TestBuildLambertConformalCoordinateSystem(tests.IrisTest): @@ -44,19 +45,21 @@ def _test(self, inverse_flattening=False, no_optionals=False): longitude_of_central_meridian=test_lon, false_easting=test_easting, false_northing=test_northing, - standard_parallel=test_parallels) + standard_parallel=test_parallels, + ) # Add ellipsoid args. - gridvar_props['semi_major_axis'] = 6377563.396 + gridvar_props["semi_major_axis"] = 6377563.396 if inverse_flattening: - gridvar_props['inverse_flattening'] = 299.3249646 + gridvar_props["inverse_flattening"] = 299.3249646 expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, - inverse_flattening=299.3249646) + 6377563.396, inverse_flattening=299.3249646 + ) else: - gridvar_props['semi_minor_axis'] = 6356256.909 + gridvar_props["semi_minor_axis"] = 6356256.909 expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, 6356256.909) + 6377563.396, 6356256.909 + ) cf_grid_var = mock.Mock(spec=[], **gridvar_props) @@ -68,7 +71,8 @@ def _test(self, inverse_flattening=False, no_optionals=False): false_easting=test_easting, false_northing=test_northing, secant_latitudes=test_parallels, - ellipsoid=expected_ellipsoid) + ellipsoid=expected_ellipsoid, + ) self.assertEqual(cs, expected) diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_mercator_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py similarity index 74% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_mercator_coordinate_system.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py index f0a20c189f..2be5477cb7 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_mercator_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_mercator_coordinate_system.py @@ -4,8 +4,8 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.build_mercator_coordinate_system`. +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +build_mercator_coordinate_system`. 
""" @@ -17,8 +17,9 @@ import iris from iris.coord_systems import Mercator -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ - build_mercator_coordinate_system +from iris.fileformats._nc_load_rules.helpers import ( + build_mercator_coordinate_system, +) class TestBuildMercatorCoordinateSystem(tests.IrisTest): @@ -27,16 +28,19 @@ def test_valid(self): spec=[], longitude_of_projection_origin=-90, semi_major_axis=6377563.396, - semi_minor_axis=6356256.909) + semi_minor_axis=6356256.909, + ) cs = build_mercator_coordinate_system(None, cf_grid_var) expected = Mercator( longitude_of_projection_origin=( - cf_grid_var.longitude_of_projection_origin), + cf_grid_var.longitude_of_projection_origin + ), ellipsoid=iris.coord_systems.GeogCS( - cf_grid_var.semi_major_axis, - cf_grid_var.semi_minor_axis)) + cf_grid_var.semi_major_axis, cf_grid_var.semi_minor_axis + ), + ) self.assertEqual(cs, expected) def test_inverse_flattening(self): @@ -44,30 +48,37 @@ def test_inverse_flattening(self): spec=[], longitude_of_projection_origin=-90, semi_major_axis=6377563.396, - inverse_flattening=299.3249646) + inverse_flattening=299.3249646, + ) cs = build_mercator_coordinate_system(None, cf_grid_var) expected = Mercator( longitude_of_projection_origin=( - cf_grid_var.longitude_of_projection_origin), + cf_grid_var.longitude_of_projection_origin + ), ellipsoid=iris.coord_systems.GeogCS( cf_grid_var.semi_major_axis, - inverse_flattening=cf_grid_var.inverse_flattening)) + inverse_flattening=cf_grid_var.inverse_flattening, + ), + ) self.assertEqual(cs, expected) def test_longitude_missing(self): cf_grid_var = mock.Mock( spec=[], semi_major_axis=6377563.396, - inverse_flattening=299.3249646) + inverse_flattening=299.3249646, + ) cs = build_mercator_coordinate_system(None, cf_grid_var) expected = Mercator( ellipsoid=iris.coord_systems.GeogCS( cf_grid_var.semi_major_axis, - inverse_flattening=cf_grid_var.inverse_flattening)) + inverse_flattening=cf_grid_var.inverse_flattening, + ) + ) self.assertEqual(cs, expected) diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_stereographic_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py similarity index 74% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_stereographic_coordinate_system.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py index 358958ce84..5058e4d7d3 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_stereographic_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_stereographic_coordinate_system.py @@ -4,8 +4,8 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.build_sterographic_coordinate_system`. +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +build_sterographic_coordinate_system`. 
""" @@ -17,8 +17,9 @@ import iris from iris.coord_systems import Stereographic -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ - build_stereographic_coordinate_system +from iris.fileformats._nc_load_rules.helpers import ( + build_stereographic_coordinate_system, +) class TestBuildStereographicCoordinateSystem(tests.IrisTest): @@ -31,21 +32,23 @@ def _test(self, inverse_flattening=False, no_offsets=False): false_easting=test_easting, false_northing=test_northing, scale_factor_at_projection_origin=1, - semi_major_axis=6377563.396) + semi_major_axis=6377563.396, + ) if inverse_flattening: - gridvar_props['inverse_flattening'] = 299.3249646 + gridvar_props["inverse_flattening"] = 299.3249646 expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, - inverse_flattening=299.3249646) + 6377563.396, inverse_flattening=299.3249646 + ) else: - gridvar_props['semi_minor_axis'] = 6356256.909 + gridvar_props["semi_minor_axis"] = 6356256.909 expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, 6356256.909) + 6377563.396, 6356256.909 + ) if no_offsets: - del gridvar_props['false_easting'] - del gridvar_props['false_northing'] + del gridvar_props["false_easting"] + del gridvar_props["false_northing"] test_easting = 0 test_northing = 0 @@ -58,7 +61,8 @@ def _test(self, inverse_flattening=False, no_offsets=False): central_lon=cf_grid_var.longitude_of_projection_origin, false_easting=test_easting, false_northing=test_northing, - ellipsoid=expected_ellipsoid) + ellipsoid=expected_ellipsoid, + ) self.assertEqual(cs, expected) diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_transverse_mercator_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py similarity index 68% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_transverse_mercator_coordinate_system.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py index 7487168fba..0096c5df4b 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_transverse_mercator_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_transverse_mercator_coordinate_system.py @@ -4,8 +4,8 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.build_transverse_mercator_coordinate_system`. +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +build_transverse_mercator_coordinate_system`. 
""" @@ -17,8 +17,9 @@ import iris from iris.coord_systems import TransverseMercator -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ - build_transverse_mercator_coordinate_system +from iris.fileformats._nc_load_rules.helpers import ( + build_transverse_mercator_coordinate_system, +) class TestBuildTransverseMercatorCoordinateSystem(tests.IrisTest): @@ -32,22 +33,24 @@ def _test(self, inverse_flattening=False, no_options=False): false_easting=test_easting, false_northing=test_northing, scale_factor_at_central_meridian=test_scale_factor, - semi_major_axis=6377563.396) + semi_major_axis=6377563.396, + ) if inverse_flattening: - gridvar_props['inverse_flattening'] = 299.3249646 + gridvar_props["inverse_flattening"] = 299.3249646 expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, - inverse_flattening=299.3249646) + 6377563.396, inverse_flattening=299.3249646 + ) else: - gridvar_props['semi_minor_axis'] = 6356256.909 + gridvar_props["semi_minor_axis"] = 6356256.909 expected_ellipsoid = iris.coord_systems.GeogCS( - 6377563.396, 6356256.909) + 6377563.396, 6356256.909 + ) if no_options: - del gridvar_props['false_easting'] - del gridvar_props['false_northing'] - del gridvar_props['scale_factor_at_central_meridian'] + del gridvar_props["false_easting"] + del gridvar_props["false_northing"] + del gridvar_props["scale_factor_at_central_meridian"] test_easting = 0 test_northing = 0 test_scale_factor = 1.0 @@ -58,13 +61,16 @@ def _test(self, inverse_flattening=False, no_options=False): expected = TransverseMercator( latitude_of_projection_origin=( - cf_grid_var.latitude_of_projection_origin), + cf_grid_var.latitude_of_projection_origin + ), longitude_of_central_meridian=( - cf_grid_var.longitude_of_central_meridian), + cf_grid_var.longitude_of_central_meridian + ), false_easting=test_easting, false_northing=test_northing, scale_factor_at_central_meridian=test_scale_factor, - ellipsoid=expected_ellipsoid) + ellipsoid=expected_ellipsoid, + ) self.assertEqual(cs, expected) diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_verticalp_coordinate_system.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_verticalp_coordinate_system.py similarity index 63% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_verticalp_coordinate_system.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_verticalp_coordinate_system.py index 588b82fd99..f34992c2be 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_verticalp_coordinate_system.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_build_verticalp_coordinate_system.py @@ -4,8 +4,8 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.build_vertical_perspective_coordinate_system`. +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +build_vertical_perspective_coordinate_system`. 
""" @@ -17,8 +17,9 @@ import iris from iris.coord_systems import VerticalPerspective -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ - build_vertical_perspective_coordinate_system +from iris.fileformats._nc_load_rules.helpers import ( + build_vertical_perspective_coordinate_system, +) class TestBuildVerticalPerspectiveCoordinateSystem(tests.IrisTest): @@ -30,24 +31,25 @@ def _test(self, inverse_flattening=False, no_offsets=False): test_easting = 100.0 test_northing = 200.0 cf_grid_var_kwargs = { - 'spec': [], - 'latitude_of_projection_origin': 1.0, - 'longitude_of_projection_origin': 2.0, - 'perspective_point_height': 2000000.0, - 'false_easting': test_easting, - 'false_northing': test_northing, - 'semi_major_axis': 6377563.396} - - ellipsoid_kwargs = {'semi_major_axis': 6377563.396} + "spec": [], + "latitude_of_projection_origin": 1.0, + "longitude_of_projection_origin": 2.0, + "perspective_point_height": 2000000.0, + "false_easting": test_easting, + "false_northing": test_northing, + "semi_major_axis": 6377563.396, + } + + ellipsoid_kwargs = {"semi_major_axis": 6377563.396} if inverse_flattening: - ellipsoid_kwargs['inverse_flattening'] = 299.3249646 + ellipsoid_kwargs["inverse_flattening"] = 299.3249646 else: - ellipsoid_kwargs['semi_minor_axis'] = 6356256.909 + ellipsoid_kwargs["semi_minor_axis"] = 6356256.909 cf_grid_var_kwargs.update(ellipsoid_kwargs) if no_offsets: - del cf_grid_var_kwargs['false_easting'] - del cf_grid_var_kwargs['false_northing'] + del cf_grid_var_kwargs["false_easting"] + del cf_grid_var_kwargs["false_northing"] test_easting = 0 test_northing = 0 @@ -56,14 +58,13 @@ def _test(self, inverse_flattening=False, no_offsets=False): cs = build_vertical_perspective_coordinate_system(None, cf_grid_var) expected = VerticalPerspective( - latitude_of_projection_origin=cf_grid_var. - latitude_of_projection_origin, - longitude_of_projection_origin=cf_grid_var. - longitude_of_projection_origin, + latitude_of_projection_origin=cf_grid_var.latitude_of_projection_origin, + longitude_of_projection_origin=cf_grid_var.longitude_of_projection_origin, perspective_point_height=cf_grid_var.perspective_point_height, false_easting=test_easting, false_northing=test_northing, - ellipsoid=ellipsoid) + ellipsoid=ellipsoid, + ) self.assertEqual(cs, expected) diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_get_attr_units.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_attr_units.py similarity index 73% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_get_attr_units.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_attr_units.py index 2e493cdecc..a159ef81a8 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_get_attr_units.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_attr_units.py @@ -4,8 +4,8 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.build_cube_metadata`. +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +get_attr_units`. 
""" @@ -17,8 +17,7 @@ import numpy as np -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ - get_attr_units +from iris.fileformats._nc_load_rules.helpers import get_attr_units class TestGetAttrUnits(tests.IrisTest): @@ -30,22 +29,23 @@ def _make_cf_var(global_attributes=None): cf_group = mock.Mock(global_attributes=global_attributes) cf_var = mock.MagicMock( - cf_name='sound_frequency', + cf_name="sound_frequency", cf_data=mock.Mock(spec=[]), standard_name=None, long_name=None, - units=u'\u266b', + units="\u266b", dtype=np.float64, cell_methods=None, - cf_group=cf_group) + cf_group=cf_group, + ) return cf_var def test_unicode_character(self): attributes = {} - expected_attributes = {'invalid_units': u'\u266b'} + expected_attributes = {"invalid_units": "\u266b"} cf_var = self._make_cf_var() attr_units = get_attr_units(cf_var, attributes) - self.assertEqual(attr_units, '?') + self.assertEqual(attr_units, "?") self.assertEqual(attributes, expected_attributes) diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_get_cf_bounds_var.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_cf_bounds_var.py similarity index 78% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_get_cf_bounds_var.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_cf_bounds_var.py index 26837b630d..ff9c51f40b 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_get_cf_bounds_var.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_cf_bounds_var.py @@ -4,8 +4,8 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.get_cf_bounds_var`. +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +get_cf_bounds_var`. 
""" @@ -15,8 +15,11 @@ from unittest import mock -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ - get_cf_bounds_var, CF_ATTR_BOUNDS, CF_ATTR_CLIMATOLOGY +from iris.fileformats._nc_load_rules.helpers import ( + CF_ATTR_BOUNDS, + CF_ATTR_CLIMATOLOGY, + get_cf_bounds_var, +) class TestGetCFBoundsVar(tests.IrisTest): @@ -25,7 +28,7 @@ class TestGetCFBoundsVar(tests.IrisTest): def _generic_test(self, test_climatological_bounds=False): cf_coord_var = mock.MagicMock() - cf_group_dict = {'TEST': mock.sentinel.bounds_var} + cf_group_dict = {"TEST": mock.sentinel.bounds_var} if test_climatological_bounds: cf_coord_var.cf_group.climatology = cf_group_dict test_attr = CF_ATTR_CLIMATOLOGY @@ -34,7 +37,7 @@ def _generic_test(self, test_climatological_bounds=False): test_attr = CF_ATTR_BOUNDS for attr in (CF_ATTR_BOUNDS, CF_ATTR_CLIMATOLOGY): - attr_val = 'TEST' if attr == test_attr else None + attr_val = "TEST" if attr == test_attr else None setattr(cf_coord_var, attr, attr_val) bounds_var, climatological = get_cf_bounds_var(cf_coord_var) @@ -48,5 +51,5 @@ def test_bounds_climatological(self): self._generic_test(test_climatological_bounds=True) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_get_names.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_names.py similarity index 61% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_get_names.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_names.py index a8e833cde9..3c7c496b54 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_get_names.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_get_names.py @@ -4,8 +4,8 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.get_names`. +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +get_names`. """ @@ -17,7 +17,7 @@ import numpy as np -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import get_names +from iris.fileformats._nc_load_rules.helpers import get_names class TestGetNames(tests.IrisTest): @@ -34,16 +34,18 @@ class TestGetNames(tests.IrisTest): standard_name. """ + @staticmethod def _make_cf_var(standard_name, long_name, cf_name): cf_var = mock.Mock( cf_name=cf_name, standard_name=standard_name, long_name=long_name, - units='degrees', + units="degrees", dtype=np.float64, cell_methods=None, - cf_group=mock.Mock(global_attributes={})) + cf_group=mock.Mock(global_attributes={}), + ) return cf_var def check_names(self, inputs, expected): @@ -53,11 +55,13 @@ def check_names(self, inputs, expected): # Expected - The expected names and attributes. exp_std_name, exp_long_name, exp_var_name, exp_attributes = expected - cf_var = self._make_cf_var(standard_name=standard_name, - long_name=long_name, cf_name=var_name) + cf_var = self._make_cf_var( + standard_name=standard_name, long_name=long_name, cf_name=var_name + ) attributes = {} res_standard_name, res_long_name, res_var_name = get_names( - cf_var, coord_name, attributes) + cf_var, coord_name, attributes + ) # Check the names and attributes are as expected. 
self.assertEqual(res_standard_name, exp_std_name) @@ -67,177 +71,220 @@ def check_names(self, inputs, expected): def test_var_name_valid(self): # Only var_name is set and it is set to a valid standard name. - inp = (None, None, 'grid_latitude', None) - exp = ('grid_latitude', None, 'grid_latitude', {}) + inp = (None, None, "grid_latitude", None) + exp = ("grid_latitude", None, "grid_latitude", {}) self.check_names(inp, exp) def test_var_name_valid_coord_name_set(self): # var_name is a valid standard name, coord_name is also set. - inp = (None, None, 'grid_latitude', 'latitude') - exp = ('latitude', None, 'grid_latitude', {}) + inp = (None, None, "grid_latitude", "latitude") + exp = ("latitude", None, "grid_latitude", {}) self.check_names(inp, exp) def test_var_name_invalid(self): # Only var_name is set but it is not a valid standard name. - inp = (None, None, 'lat_var_name', None) - exp = (None, None, 'lat_var_name', {}) + inp = (None, None, "lat_var_name", None) + exp = (None, None, "lat_var_name", {}) self.check_names(inp, exp) def test_var_name_invalid_coord_name_set(self): # var_name is not a valid standard name, the coord_name is also set. - inp = (None, None, 'lat_var_name', 'latitude') - exp = ('latitude', None, 'lat_var_name', {}) + inp = (None, None, "lat_var_name", "latitude") + exp = ("latitude", None, "lat_var_name", {}) self.check_names(inp, exp) def test_long_name_set_var_name_valid(self): # long_name is not None, var_name is set to a valid standard name. - inp = (None, 'lat_long_name', 'grid_latitude', None) - exp = ('grid_latitude', 'lat_long_name', 'grid_latitude', {}) + inp = (None, "lat_long_name", "grid_latitude", None) + exp = ("grid_latitude", "lat_long_name", "grid_latitude", {}) self.check_names(inp, exp) def test_long_name_set_var_name_valid_coord_name_set(self): # long_name is not None, var_name is set to a valid standard name, and # coord_name is set. - inp = (None, 'lat_long_name', 'grid_latitude', 'latitude') - exp = ('latitude', 'lat_long_name', 'grid_latitude', {}) + inp = (None, "lat_long_name", "grid_latitude", "latitude") + exp = ("latitude", "lat_long_name", "grid_latitude", {}) self.check_names(inp, exp) def test_long_name_set_var_name_invalid(self): # long_name is not None, var_name is not set to a valid standard name. - inp = (None, 'lat_long_name', 'lat_var_name', None) - exp = (None, 'lat_long_name', 'lat_var_name', {}) + inp = (None, "lat_long_name", "lat_var_name", None) + exp = (None, "lat_long_name", "lat_var_name", {}) self.check_names(inp, exp) def test_long_name_set_var_name_invalid_coord_name_set(self): # long_name is not None, var_name is not set to a valid standard name, # and coord_name is set. - inp = (None, 'lat_long_name', 'lat_var_name', 'latitude') - exp = ('latitude', 'lat_long_name', 'lat_var_name', {}) + inp = (None, "lat_long_name", "lat_var_name", "latitude") + exp = ("latitude", "lat_long_name", "lat_var_name", {}) self.check_names(inp, exp) def test_std_name_valid_var_name_valid(self): # standard_name is a valid standard name, var_name is a valid standard # name. - inp = ('projection_y_coordinate', None, 'grid_latitude', None) - exp = ('projection_y_coordinate', None, 'grid_latitude', {}) + inp = ("projection_y_coordinate", None, "grid_latitude", None) + exp = ("projection_y_coordinate", None, "grid_latitude", {}) self.check_names(inp, exp) def test_std_name_valid_var_name_valid_coord_name_set(self): # standard_name is a valid standard name, var_name is a valid standard # name, coord_name is set. 
- inp = ('projection_y_coordinate', None, 'grid_latitude', 'latitude') - exp = ('projection_y_coordinate', None, 'grid_latitude', {}) + inp = ("projection_y_coordinate", None, "grid_latitude", "latitude") + exp = ("projection_y_coordinate", None, "grid_latitude", {}) self.check_names(inp, exp) def test_std_name_valid_var_name_invalid(self): # standard_name is a valid standard name, var_name is not a valid # standard name. - inp = ('projection_y_coordinate', None, 'lat_var_name', None) - exp = ('projection_y_coordinate', None, 'lat_var_name', {}) + inp = ("projection_y_coordinate", None, "lat_var_name", None) + exp = ("projection_y_coordinate", None, "lat_var_name", {}) self.check_names(inp, exp) def test_std_name_valid_var_name_invalid_coord_name_set(self): # standard_name is a valid standard name, var_name is not a valid # standard name, coord_name is set. - inp = ('projection_y_coordinate', None, 'lat_var_name', 'latitude') - exp = ('projection_y_coordinate', None, 'lat_var_name', {}) + inp = ("projection_y_coordinate", None, "lat_var_name", "latitude") + exp = ("projection_y_coordinate", None, "lat_var_name", {}) self.check_names(inp, exp) def test_std_name_valid_long_name_set_var_name_valid(self): # standard_name is a valid standard name, long_name is not None, # var_name is a valid standard name. - inp = ('projection_y_coordinate', 'lat_long_name', 'grid_latitude', - None) - exp = ('projection_y_coordinate', 'lat_long_name', 'grid_latitude', {}) + inp = ( + "projection_y_coordinate", + "lat_long_name", + "grid_latitude", + None, + ) + exp = ("projection_y_coordinate", "lat_long_name", "grid_latitude", {}) self.check_names(inp, exp) def test_std_name_valid_long_name_set_var_name_valid_coord_name_set(self): # standard_name is a valid standard name, long_name is not None, # var_name is a valid standard name, coord_name is set. - inp = ('projection_y_coordinate', 'lat_long_name', 'grid_latitude', - 'latitude') - exp = ('projection_y_coordinate', 'lat_long_name', 'grid_latitude', {}) + inp = ( + "projection_y_coordinate", + "lat_long_name", + "grid_latitude", + "latitude", + ) + exp = ("projection_y_coordinate", "lat_long_name", "grid_latitude", {}) self.check_names(inp, exp) def test_std_name_valid_long_name_set_var_name_invalid(self): # standard_name is a valid standard name, long_name is not None, # var_name is not a valid standard name. - inp = ('projection_y_coordinate', 'lat_long_name', 'lat_var_name', - None) - exp = ('projection_y_coordinate', 'lat_long_name', 'lat_var_name', {}) + inp = ( + "projection_y_coordinate", + "lat_long_name", + "lat_var_name", + None, + ) + exp = ("projection_y_coordinate", "lat_long_name", "lat_var_name", {}) self.check_names(inp, exp) def test_std_name_valid_long_name_set_var_name_invalid_coord_name_set( - self): + self, + ): # standard_name is a valid standard name, long_name is not None, # var_name is not a valid standard name, coord_name is set. - inp = ('projection_y_coordinate', 'lat_long_name', 'lat_var_name', - 'latitude') - exp = ('projection_y_coordinate', 'lat_long_name', 'lat_var_name', {}) + inp = ( + "projection_y_coordinate", + "lat_long_name", + "lat_var_name", + "latitude", + ) + exp = ("projection_y_coordinate", "lat_long_name", "lat_var_name", {}) self.check_names(inp, exp) def test_std_name_invalid_var_name_valid(self): # standard_name is not a valid standard name, var_name is a valid # standard name. 
- inp = ('latitude_coord', None, 'grid_latitude', None) - exp = ('grid_latitude', None, 'grid_latitude', {}) + inp = ("latitude_coord", None, "grid_latitude", None) + exp = ("grid_latitude", None, "grid_latitude", {}) self.check_names(inp, exp) def test_std_name_invalid_var_name_valid_coord_name_set(self): # standard_name is not a valid standard name, var_name is a valid # standard name, coord_name is set. - inp = ('latitude_coord', None, 'grid_latitude', 'latitude') - exp = ('latitude', None, 'grid_latitude', - {'invalid_standard_name': 'latitude_coord'}) + inp = ("latitude_coord", None, "grid_latitude", "latitude") + exp = ( + "latitude", + None, + "grid_latitude", + {"invalid_standard_name": "latitude_coord"}, + ) self.check_names(inp, exp) def test_std_name_invalid_var_name_invalid(self): # standard_name is not a valid standard name, var_name is not a valid # standard name. - inp = ('latitude_coord', None, 'lat_var_name', None) - exp = (None, None, 'lat_var_name', {}) + inp = ("latitude_coord", None, "lat_var_name", None) + exp = (None, None, "lat_var_name", {}) self.check_names(inp, exp) def test_std_name_invalid_var_name_invalid_coord_name_set(self): # standard_name is not a valid standard name, var_name is not a valid # standard name, coord_name is set. - inp = ('latitude_coord', None, 'lat_var_name', 'latitude') - exp = ('latitude', None, 'lat_var_name', - {'invalid_standard_name': 'latitude_coord'}) + inp = ("latitude_coord", None, "lat_var_name", "latitude") + exp = ( + "latitude", + None, + "lat_var_name", + {"invalid_standard_name": "latitude_coord"}, + ) self.check_names(inp, exp) def test_std_name_invalid_long_name_set_var_name_valid(self): # standard_name is not a valid standard name, long_name is not None # var_name is a valid standard name. - inp = ('latitude_coord', 'lat_long_name', 'grid_latitude', None) - exp = ('grid_latitude', 'lat_long_name', 'grid_latitude', - {'invalid_standard_name': 'latitude_coord'}) + inp = ("latitude_coord", "lat_long_name", "grid_latitude", None) + exp = ( + "grid_latitude", + "lat_long_name", + "grid_latitude", + {"invalid_standard_name": "latitude_coord"}, + ) self.check_names(inp, exp) def test_std_name_invalid_long_name_set_var_name_valid_coord_name_set( - self): + self, + ): # standard_name is not a valid standard name, long_name is not None, # var_name is a valid standard name, coord_name is set. - inp = ('latitude_coord', 'lat_long_name', 'grid_latitude', 'latitude') - exp = ('latitude', 'lat_long_name', 'grid_latitude', - {'invalid_standard_name': 'latitude_coord'}) + inp = ("latitude_coord", "lat_long_name", "grid_latitude", "latitude") + exp = ( + "latitude", + "lat_long_name", + "grid_latitude", + {"invalid_standard_name": "latitude_coord"}, + ) self.check_names(inp, exp) def test_std_name_invalid_long_name_set_var_name_invalid(self): # standard_name is not a valid standard name, long_name is not None # var_name is not a valid standard name. - inp = ('latitude_coord', 'lat_long_name', 'lat_var_name', None) - exp = (None, 'lat_long_name', 'lat_var_name', - {'invalid_standard_name': 'latitude_coord'}) + inp = ("latitude_coord", "lat_long_name", "lat_var_name", None) + exp = ( + None, + "lat_long_name", + "lat_var_name", + {"invalid_standard_name": "latitude_coord"}, + ) self.check_names(inp, exp) def test_std_name_invalid_long_name_set_var_name_invalid_coord_name_set( - self): + self, + ): # standard_name is not a valid standard name, long_name is not None, # var_name is not a valid standard name, coord_name is set. 
- inp = ('latitude_coord', 'lat_long_name', 'lat_var_name', 'latitude') - exp = ('latitude', 'lat_long_name', 'lat_var_name', - {'invalid_standard_name': 'latitude_coord'}) + inp = ("latitude_coord", "lat_long_name", "lat_var_name", "latitude") + exp = ( + "latitude", + "lat_long_name", + "lat_var_name", + {"invalid_standard_name": "latitude_coord"}, + ) self.check_names(inp, exp) diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_has_supported_mercator_parameters.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py similarity index 82% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_has_supported_mercator_parameters.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py index 2e1d315de4..dfe2895f29 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_has_supported_mercator_parameters.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_mercator_parameters.py @@ -4,22 +4,22 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.has_supported_mercator_parameters`. +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +has_supported_mercator_parameters`. """ +from unittest import mock import warnings +from iris.fileformats._nc_load_rules.helpers import ( + has_supported_mercator_parameters, +) + # import iris tests first so that some things can be initialised before # importing anything else import iris.tests as tests # isort:skip -from unittest import mock - -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ - has_supported_mercator_parameters - def _engine(cf_grid_var, cf_name): cf_group = {cf_name: cf_grid_var} @@ -28,9 +28,8 @@ def _engine(cf_grid_var, cf_name): class TestHasSupportedMercatorParameters(tests.IrisTest): - def test_valid(self): - cf_name = 'mercator' + cf_name = "mercator" cf_grid_var = mock.Mock( spec=[], longitude_of_projection_origin=-90, @@ -38,7 +37,8 @@ def test_valid(self): false_northing=0, scale_factor_at_projection_origin=1, semi_major_axis=6377563.396, - semi_minor_axis=6356256.909) + semi_minor_axis=6356256.909, + ) engine = _engine(cf_grid_var, cf_name) is_valid = has_supported_mercator_parameters(engine, cf_name) @@ -48,7 +48,7 @@ def test_valid(self): def test_invalid_scale_factor(self): # Iris does not yet support scale factors other than one for # Mercator projections - cf_name = 'mercator' + cf_name = "mercator" cf_grid_var = mock.Mock( spec=[], longitude_of_projection_origin=0, @@ -56,7 +56,8 @@ def test_invalid_scale_factor(self): false_northing=0, scale_factor_at_projection_origin=0.9, semi_major_axis=6377563.396, - semi_minor_axis=6356256.909) + semi_minor_axis=6356256.909, + ) engine = _engine(cf_grid_var, cf_name) with warnings.catch_warnings(record=True) as warns: @@ -65,12 +66,12 @@ def test_invalid_scale_factor(self): self.assertFalse(is_valid) self.assertEqual(len(warns), 1) - self.assertRegex(str(warns[0]), 'Scale factor') + self.assertRegex(str(warns[0]), "Scale factor") def test_invalid_standard_parallel(self): # Iris does not yet support standard parallels other than zero for # Mercator projections - cf_name = 'mercator' + cf_name = "mercator" cf_grid_var = mock.Mock( spec=[], longitude_of_projection_origin=0, @@ -78,7 +79,8 @@ def 
test_invalid_standard_parallel(self): false_northing=0, standard_parallel=30, semi_major_axis=6377563.396, - semi_minor_axis=6356256.909) + semi_minor_axis=6356256.909, + ) engine = _engine(cf_grid_var, cf_name) with warnings.catch_warnings(record=True) as warns: @@ -87,12 +89,12 @@ def test_invalid_standard_parallel(self): self.assertFalse(is_valid) self.assertEqual(len(warns), 1) - self.assertRegex(str(warns[0]), 'Standard parallel') + self.assertRegex(str(warns[0]), "Standard parallel") def test_invalid_false_easting(self): # Iris does not yet support false eastings other than zero for # Mercator projections - cf_name = 'mercator' + cf_name = "mercator" cf_grid_var = mock.Mock( spec=[], longitude_of_projection_origin=0, @@ -100,7 +102,8 @@ def test_invalid_false_easting(self): false_northing=0, scale_factor_at_projection_origin=1, semi_major_axis=6377563.396, - semi_minor_axis=6356256.909) + semi_minor_axis=6356256.909, + ) engine = _engine(cf_grid_var, cf_name) with warnings.catch_warnings(record=True) as warns: @@ -109,12 +112,12 @@ def test_invalid_false_easting(self): self.assertFalse(is_valid) self.assertEqual(len(warns), 1) - self.assertRegex(str(warns[0]), 'False easting') + self.assertRegex(str(warns[0]), "False easting") def test_invalid_false_northing(self): # Iris does not yet support false northings other than zero for # Mercator projections - cf_name = 'mercator' + cf_name = "mercator" cf_grid_var = mock.Mock( spec=[], longitude_of_projection_origin=0, @@ -122,7 +125,8 @@ def test_invalid_false_northing(self): false_northing=100, scale_factor_at_projection_origin=1, semi_major_axis=6377563.396, - semi_minor_axis=6356256.909) + semi_minor_axis=6356256.909, + ) engine = _engine(cf_grid_var, cf_name) with warnings.catch_warnings(record=True) as warns: @@ -131,7 +135,7 @@ def test_invalid_false_northing(self): self.assertFalse(is_valid) self.assertEqual(len(warns), 1) - self.assertRegex(str(warns[0]), 'False northing') + self.assertRegex(str(warns[0]), "False northing") if __name__ == "__main__": diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_has_supported_stereographic_parameters.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_stereographic_parameters.py similarity index 81% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_has_supported_stereographic_parameters.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_stereographic_parameters.py index fd588b6fcb..8bec823f4b 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_has_supported_stereographic_parameters.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_has_supported_stereographic_parameters.py @@ -4,22 +4,22 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.has_supported_stereographic_parameters`. +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +has_supported_stereographic_parameters`. 
""" +from unittest import mock import warnings +from iris.fileformats._nc_load_rules.helpers import ( + has_supported_stereographic_parameters, +) + # import iris tests first so that some things can be initialised before # importing anything else import iris.tests as tests # isort:skip -from unittest import mock - -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ - has_supported_stereographic_parameters - def _engine(cf_grid_var, cf_name): cf_group = {cf_name: cf_grid_var} @@ -29,7 +29,7 @@ def _engine(cf_grid_var, cf_name): class TestHasSupportedStereographicParameters(tests.IrisTest): def test_valid(self): - cf_name = 'stereographic' + cf_name = "stereographic" cf_grid_var = mock.Mock( spec=[], latitude_of_projection_origin=0, @@ -38,7 +38,8 @@ def test_valid(self): false_northing=200, scale_factor_at_projection_origin=1, semi_major_axis=6377563.396, - semi_minor_axis=6356256.909) + semi_minor_axis=6356256.909, + ) engine = _engine(cf_grid_var, cf_name) is_valid = has_supported_stereographic_parameters(engine, cf_name) @@ -48,7 +49,7 @@ def test_valid(self): def test_invalid_scale_factor(self): # Iris does not yet support scale factors other than one for # stereographic projections - cf_name = 'stereographic' + cf_name = "stereographic" cf_grid_var = mock.Mock( spec=[], latitude_of_projection_origin=0, @@ -57,7 +58,8 @@ def test_invalid_scale_factor(self): false_northing=200, scale_factor_at_projection_origin=0.9, semi_major_axis=6377563.396, - semi_minor_axis=6356256.909) + semi_minor_axis=6356256.909, + ) engine = _engine(cf_grid_var, cf_name) with warnings.catch_warnings(record=True) as warns: @@ -66,7 +68,7 @@ def test_invalid_scale_factor(self): self.assertFalse(is_valid) self.assertEqual(len(warns), 1) - self.assertRegex(str(warns[0]), 'Scale factor') + self.assertRegex(str(warns[0]), "Scale factor") if __name__ == "__main__": diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_reorder_bounds_data.py b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_reorder_bounds_data.py similarity index 60% rename from lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_reorder_bounds_data.py rename to lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_reorder_bounds_data.py index 83843cf782..1ee0cfbf2e 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_reorder_bounds_data.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/helpers/test_reorder_bounds_data.py @@ -4,8 +4,8 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -Test function :func:`iris.fileformats._pyke_rules.compiled_krb.\ -fc_rules_cf_fc.reorder_bounds_data`. +Test function :func:`iris.fileformats._nc_load_rules.helpers.\ +reorder_bounds_data`. 
""" @@ -17,16 +17,16 @@ import numpy as np -from iris.fileformats._pyke_rules.compiled_krb.fc_rules_cf_fc import \ - reorder_bounds_data +from iris.fileformats._nc_load_rules.helpers import reorder_bounds_data class Test(tests.IrisTest): def test_fastest_varying(self): bounds_data = np.arange(24).reshape(2, 3, 4) - cf_bounds_var = mock.Mock(dimensions=('foo', 'bar', 'nv'), - cf_name='wibble_bnds') - cf_coord_var = mock.Mock(dimensions=('foo', 'bar')) + cf_bounds_var = mock.Mock( + dimensions=("foo", "bar", "nv"), cf_name="wibble_bnds" + ) + cf_coord_var = mock.Mock(dimensions=("foo", "bar")) res = reorder_bounds_data(bounds_data, cf_bounds_var, cf_coord_var) # Vertex dimension (nv) is already at the end. @@ -34,8 +34,8 @@ def test_fastest_varying(self): def test_slowest_varying(self): bounds_data = np.arange(24).reshape(4, 2, 3) - cf_bounds_var = mock.Mock(dimensions=('nv', 'foo', 'bar')) - cf_coord_var = mock.Mock(dimensions=('foo', 'bar')) + cf_bounds_var = mock.Mock(dimensions=("nv", "foo", "bar")) + cf_coord_var = mock.Mock(dimensions=("foo", "bar")) res = reorder_bounds_data(bounds_data, cf_bounds_var, cf_coord_var) # Move zeroth dimension (nv) to the end. @@ -44,12 +44,13 @@ def test_slowest_varying(self): def test_different_dim_names(self): bounds_data = np.arange(24).reshape(2, 3, 4) - cf_bounds_var = mock.Mock(dimensions=('foo', 'bar', 'nv'), - cf_name='wibble_bnds') - cf_coord_var = mock.Mock(dimensions=('x', 'y'), cf_name='wibble') - with self.assertRaisesRegex(ValueError, 'dimension names'): + cf_bounds_var = mock.Mock( + dimensions=("foo", "bar", "nv"), cf_name="wibble_bnds" + ) + cf_coord_var = mock.Mock(dimensions=("x", "y"), cf_name="wibble") + with self.assertRaisesRegex(ValueError, "dimension names"): reorder_bounds_data(bounds_data, cf_bounds_var, cf_coord_var) -if __name__ == '__main__': +if __name__ == "__main__": tests.main() diff --git a/lib/iris/tests/unit/fileformats/netcdf/__init__.py b/lib/iris/tests/unit/fileformats/netcdf/__init__.py index 732094f67a..b8d0502075 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/__init__.py +++ b/lib/iris/tests/unit/fileformats/netcdf/__init__.py @@ -3,4 +3,4 @@ # This file is part of Iris and is released under the LGPL license. # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. -"""Unit tests for the :mod:`iris.fileformats.netcdf` module.""" +"""Unit tests for the :mod:`iris.fileformats.netcdf._nc_load_rules` module.""" diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/__init__.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/__init__.py deleted file mode 100644 index 8bc429a906..0000000000 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -"""Unit tests for the `iris.fileformats.netcdf._load_cube` function.""" diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__additional.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__additional.py deleted file mode 100644 index 8340147bb1..0000000000 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__additional.py +++ /dev/null @@ -1,55 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. 
-# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. -""" -Unit tests for the engine.activate() call within the -`iris.fileformats.netcdf._load_cube` function. - -For now, these tests are designed to function with **either** the "old" -Pyke-rules implementation in :mod:`iris.fileformats._pyke_rules`, **or** the -"new" :mod:`iris.fileformats._nc_load_rules`. -Both of those supply an "engine" with an "activate" method - -- at least for now : may be simplified in future. - -""" -import iris.tests as tests -from iris.tests.unit.fileformats.netcdf.load_cube.load_cube__activate.test__grid_mappings import ( - Mixin__grid_mapping, -) - - -class Test__additional(Mixin__grid_mapping, tests.IrisTest): - # Run grid-mapping tests with non-Pyke (actions) - use_pyke = True - debug = False - - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - - def test_nondim_lats(self): - # Check what happens when values don't allow a coord to be dim-coord. - # - # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_grid_mapping_latitude_longitude - # 003 : fc_provides_coordinate_latitude - # 004 : fc_provides_coordinate_longitude - # 005 : fc_build_coordinate_latitude - # 006 : fc_build_coordinate_longitude - # NOTES: - # in terms of rule triggers, this is not distinct from a normal case - # - but the latitude is now an aux-coord. - warning = "must be.* monotonic" - result = self.run_testcase(warning=warning, yco_values=[0.0, 0.0]) - self.check_result(result, yco_is_aux=True) - - -if __name__ == "__main__": - tests.main() diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube.py b/lib/iris/tests/unit/fileformats/netcdf/test__load_cube.py similarity index 100% rename from lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube.py rename to lib/iris/tests/unit/fileformats/netcdf/test__load_cube.py diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/__init__.py b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/__init__.py deleted file mode 100644 index ae709e85e1..0000000000 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the LGPL license. -# See COPYING and COPYING.LESSER in the root of the repository for full -# licensing details. 
-"""Unit tests for the :mod:`iris.fileformats.fc_rules_cf_fc` module.""" diff --git a/pyproject.toml b/pyproject.toml index 58ce1daba1..310bfb05d4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,6 @@ [build-system] # Defined by PEP 518 requires = [ - "scitools-pyke", "setuptools>=40.8.0", "wheel", ] @@ -16,7 +15,6 @@ include = '\.pyi?$' extend-exclude = ''' ( /( - | pyke_rules | sphinxext | tools )/ diff --git a/requirements/ci/nox.lock/py37-linux-64.lock b/requirements/ci/nox.lock/py37-linux-64.lock deleted file mode 100644 index 016a9ebb69..0000000000 --- a/requirements/ci/nox.lock/py37-linux-64.lock +++ /dev/null @@ -1,232 +0,0 @@ -# platform: linux-64 -# env_hash: 846d5ea3acab5e11a9cd84738a73737ed3b365db07cc9b6825611e23c6db0e3d -@EXPLICIT -https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 -https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2021.5.30-ha878542_0.tar.bz2#6a777890e94194dc94a29a76d2a7e721 -https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 -https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 -https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb -https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 -https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.35.1-hea4e1c9_2.tar.bz2#83610dba766a186bdc7a116053b782a4 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-9.3.0-hff62375_19.tar.bz2#c2d8da3cb171e4aa642d20c6e4e42a04 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-9.3.0-h6de172a_19.tar.bz2#cd9a24a8dde03ec0cf0e603b0bea85a1 -https://conda.anaconda.org/conda-forge/linux-64/mpi-1.0-mpich.tar.bz2#c1fcff3417b5a22bbc4cf6e8c23648cf -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.25-ha770c72_2.tar.bz2#b1ba065c6d2b9468035472a9d63e5b08 -https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-9.3.0-hff62375_19.tar.bz2#aea379bd68fdcdf9499fa1453f852ac1 -https://conda.anaconda.org/conda-forge/linux-64/libgomp-9.3.0-h2828fa1_19.tar.bz2#ab0a307912033126da02507b59e79ec9 -https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-1_gnu.tar.bz2#561e277319a41d4f24f5c05a9ef63c04 -https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-9.3.0-h2828fa1_19.tar.bz2#9d5cdfc51476ee4dcdd96ed2dca3f943 -https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.3-h516909a_0.tar.bz2#1378b88874f42ac31b2f8e4f6975cb7b -https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.17.1-h7f98852_1.tar.bz2#ed1dc233ed5e3eaa9bfbaac64d130c5e -https://conda.anaconda.org/conda-forge/linux-64/expat-2.4.1-h9c3ff4c_0.tar.bz2#16054ef3cb3ec5d8d29d08772662f65d -https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 -https://conda.anaconda.org/conda-forge/linux-64/geos-3.9.1-h9c3ff4c_2.tar.bz2#b9a6d9422aed3ad84ec6ccee9bfcaa0f 
-https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h36c2ea0_2.tar.bz2#626e68ae9cc5912d6adb79d318cf962d -https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 -https://conda.anaconda.org/conda-forge/linux-64/icu-68.1-h58526e2_0.tar.bz2#fc7a4271dc2a7f4fd78cd63695baf7c3 -https://conda.anaconda.org/conda-forge/linux-64/jbig-2.1-h7f98852_2003.tar.bz2#1aa0cee79792fa97b7ff4545110b60bf -https://conda.anaconda.org/conda-forge/linux-64/jpeg-9d-h36c2ea0_0.tar.bz2#ea02ce6037dbe81803ae6123e5ba1568 -https://conda.anaconda.org/conda-forge/linux-64/lerc-2.2.1-h9c3ff4c_0.tar.bz2#ea833dcaeb9e7ac4fac521f1a7abec82 -https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.7-h7f98852_5.tar.bz2#10e242842cd30c59c12d79371dc0f583 -https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 -https://conda.anaconda.org/conda-forge/linux-64/libffi-3.3-h58526e2_2.tar.bz2#665369991d8dd290ac5ee92fce3e6bf5 -https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.16-h516909a_0.tar.bz2#5c0f338a513a2943c659ae619fca9211 -https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d -https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.15-pthreads_h8fe5266_1.tar.bz2#bb5527a16584426a897f22643d9a36a6 -https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f -https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.6-h58526e2_1007.tar.bz2#7f6569a0c2f27acb8fc90600b382e544 -https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.32.1-h7f98852_1000.tar.bz2#772d69f030955d9646d3d0eaf21d859d -https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.2.0-h7f98852_2.tar.bz2#fb63a035a3b552c88a30d84b89ebf4c4 -https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.3-h9c3ff4c_0.tar.bz2#4eb64ee0d5cd43096ffcf843c76b05d4 -https://conda.anaconda.org/conda-forge/linux-64/mpich-3.4.2-h846660c_100.tar.bz2#0868d02349fc7e128d4bdc515b58dd7e -https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.2-h58526e2_4.tar.bz2#509f2a21c4a09214cd737a480dfd80c9 -https://conda.anaconda.org/conda-forge/linux-64/nspr-4.30-h9c3ff4c_0.tar.bz2#e6dc1f8f6e0bcebe8e3d8a5bca258dbe -https://conda.anaconda.org/conda-forge/linux-64/openssl-1.1.1k-h7f98852_0.tar.bz2#07fae2cb088379c8441e0f3ffa1f4025 -https://conda.anaconda.org/conda-forge/linux-64/pcre-8.45-h9c3ff4c_0.tar.bz2#c05d1820a6d34ff07aaaab7a9b7eddaa -https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 -https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 -https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a -https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.0.10-h7f98852_0.tar.bz2#d6b0b50b49eccfe0be0373be628be0f3 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.9-h7f98852_0.tar.bz2#bf6f803a544f26ebbdc3bfff272eb179 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2#be93aabceefa2fac576e971aef407908 -https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2#06feff3d2634e3097ce2fe681474b534 
-https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h7f98852_1002.tar.bz2#1e15f6ad85a7d743a2ac68dae6c82b98 -https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2#b4a4381d54784606820704f7b5f05a15 -https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.0-h7f98852_3.tar.bz2#52402c791f35e414e704b7a113f99605 -https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.5-h516909a_1.tar.bz2#33f601066901f3e1a85af3522a8113f9 -https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h516909a_0.tar.bz2#03a530e925414902547cf48da7756db8 -https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.11-h516909a_1010.tar.bz2#339cc5584e6d26bc73a875ba900028c3 -https://conda.anaconda.org/conda-forge/linux-64/gettext-0.19.8.1-h0b5b191_1005.tar.bz2#ff6f69b593a9e74c0e6b61908ac513fa -https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h10796ff_3.tar.bz2#21a8d66dc17f065023b33145c42652fe -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-9_openblas.tar.bz2#5f08755e98b2a43ca68124e629a5a0cb -https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 -https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.10-hcdb4288_3.tar.bz2#d8f51405997093ff1799ded7650439c4 -https://conda.anaconda.org/conda-forge/linux-64/libllvm11-11.1.0-hf817b99_2.tar.bz2#646fa2f7c60b69ee8f918668e9c2fd31 -https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.43.0-h812cca2_0.tar.bz2#1867d1e9658596b3fac8847a7702eef4 -https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.37-h21135ba_2.tar.bz2#b6acf807307d033d4b7e758b4f44b036 -https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.9.0-ha56f1ee_6.tar.bz2#f0dfb86444df325e599dbc3f4c0a3f5b -https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 -https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.13-h7f98852_1003.tar.bz2#a9371e9e40aded194dcba1447606c9a1 -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.9.12-h72842e0_0.tar.bz2#bd14fdf5b9ee5568056a40a6a2f41866 -https://conda.anaconda.org/conda-forge/linux-64/libzip-1.8.0-h4de3113_0.tar.bz2#2d1b63c574f3e11157a07313e58ba7af -https://conda.anaconda.org/conda-forge/linux-64/readline-8.1-h46c0cb4_0.tar.bz2#5788de3c8d7a7d64ac56c784c4ef48e6 -https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.10-h21135ba_1.tar.bz2#c647f70aa7e3d4cc4e029cc1c9a99953 -https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.27.27-h975c496_1.tar.bz2#e663bd5dbc8cc4c1647d9f51cf25872c -https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.3-hd9c2040_1000.tar.bz2#9e856f78d5c80d5a78f61e72d1d473a3 -https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.0-ha95c52a_0.tar.bz2#b56f94865e2de36abf054e7bfa499034 -https://conda.anaconda.org/conda-forge/linux-64/freetype-2.10.4-h0708190_1.tar.bz2#4a06f2ac2e5bfae7b6b245171c3f07aa -https://conda.anaconda.org/conda-forge/linux-64/krb5-1.19.1-hcc1bbae_0.tar.bz2#59b0695a515a6c54d45463dbf208ae38 -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-9_openblas.tar.bz2#edee85b4f83376ceae81e0975b8bffa2 -https://conda.anaconda.org/conda-forge/linux-64/libclang-11.1.0-default_ha53f305_1.tar.bz2#b9b71585ca4fcb5d442c5a9df5dd7e98 -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.68.3-h3e27bee_0.tar.bz2#99416a3287216de097d503b827ad0bde -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-9_openblas.tar.bz2#572d84ab07962986f6dd8e4637a475ca 
-https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.3.0-hf544144_1.tar.bz2#a65a4158716bd7d95bfa69bcfd83081c -https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.0.3-he3ba5ed_0.tar.bz2#f9dbabc7e01c459ed7a1d1d64b206e9b -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.25-hfa10184_2.tar.bz2#5a35fdd2da4c2d5fdf20575d39c232e5 -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.36.0-h9cd32fc_0.tar.bz2#d5bbac924cbda57469f43448d5236a50 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.7.2-h7f98852_0.tar.bz2#12a61e640b8894504326aadafccbb790 -https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.36.0-h3371d22_4.tar.bz2#661e1ed5d92552785d9f8c781ce68685 -https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.13.1-hba837de_1005.tar.bz2#fd3611672eb91bc9d24fd6fb970037eb -https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.6-h04a7f16_0.tar.bz2#b24a1e18325a6e8f8b6b4a2ec5860ce2 -https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.68.3-h9c3ff4c_0.tar.bz2#2e9275303dd09a2e245faf31770a1416 -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.18.4-h76c114f_2.tar.bz2#5db765d4974fa89f64c1544eb2a552cb -https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h64030ff_2.tar.bz2#112eb9b5b93f0c02e59aea4fd1967363 -https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.12-hddcbb42_0.tar.bz2#797117394a4aa588de6d741b06fad80f -https://conda.anaconda.org/conda-forge/linux-64/libcurl-7.77.0-h2574ce0_0.tar.bz2#05cf8dca8408b5f1ffcc5e2d5a7c5da2 -https://conda.anaconda.org/conda-forge/linux-64/libpq-13.3-hd57d9b9_0.tar.bz2#66ef2cacc483205b7d303f7b02601c3b -https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.0-h3452ae3_0.tar.bz2#8f4e19a8988c38feec7db41bcd0bf0d0 -https://conda.anaconda.org/conda-forge/linux-64/nss-3.67-hb5efdd6_0.tar.bz2#3f2a4bc7d5fded1327ff1b8c61faae53 -https://conda.anaconda.org/conda-forge/linux-64/python-3.7.10-hffdb5ce_100_cpython.tar.bz2#7425fffa658971915f595e9110163c3c -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h7f98852_1.tar.bz2#536cc5db4d0a3ba0630541aec064b5e4 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb -https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.12-py_0.tar.bz2#2489a97287f90176ecdc3ca982b4b0a0 -https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2#5f095bc6454094e96f146491fd03633b -https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-h6cf1ce9_1008.tar.bz2#a43fb47d15e116f8be4be7e6b17ab59f -https://conda.anaconda.org/conda-forge/noarch/cloudpickle-1.6.0-py_0.tar.bz2#76d764d8881719e305f6fa368dc2b65e -https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.4-pyh9f0ad1d_0.tar.bz2#c08b4c1326b880ed44f3ffb04803332f -https://conda.anaconda.org/conda-forge/linux-64/curl-7.77.0-hea6ffbf_0.tar.bz2#7d1168349d6fba67ae1fdf61970b83e1 -https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.2-pyhd8ed1ab_0.tar.bz2#ae8b866c376568b0342ae2c9b68f1e65 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.0.12-pyh9f0ad1d_0.tar.bz2#7544ed05bbbe9bb687bc9bcbe4d6cb46 -https://conda.anaconda.org/conda-forge/noarch/fsspec-2021.6.1-pyhd8ed1ab_0.tar.bz2#b8dca3cd859c8a849042af6db1cbedca -https://conda.anaconda.org/conda-forge/linux-64/glib-2.68.3-h9c3ff4c_0.tar.bz2#90e989058c8b42e3ddee1560c534313b -https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.18.4-hf529b03_2.tar.bz2#526fadaa13ec264cb919436953bc2766 
-https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.10.6-mpi_mpich_h996c276_1014.tar.bz2#6af2e2e4dfb0ef36c35042cd69a1599d -https://conda.anaconda.org/conda-forge/noarch/heapdict-1.0.1-py_0.tar.bz2#77242bfb1e74a627fb06319b5a2d3b95 -https://conda.anaconda.org/conda-forge/noarch/idna-2.10-pyh9f0ad1d_0.tar.bz2#f95a12b4f435aae6680fe55ae2eb1b06 -https://conda.anaconda.org/conda-forge/noarch/imagesize-1.2.0-py_0.tar.bz2#5879bd2c4b399a5072468e5fe587bf1b -https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.3.0-pyh9f0ad1d_0.tar.bz2#e4a33192da1a6dc4967ba18c6c765945 -https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.2-h78a0170_0.tar.bz2#ac0c23e6f3bbb61569781f00b5666f97 -https://conda.anaconda.org/conda-forge/noarch/locket-0.2.0-py_2.tar.bz2#709e8671651c7ec3d1ad07800339ff1d -https://conda.anaconda.org/conda-forge/noarch/nose-1.3.7-py_1006.tar.bz2#382019d5f8e9362ef6f60a8d4e7bce8f -https://conda.anaconda.org/conda-forge/noarch/olefile-0.46-pyh9f0ad1d_1.tar.bz2#0b2e68acc8c78c8cc392b90983481f58 -https://conda.anaconda.org/conda-forge/linux-64/proj-7.2.0-h277dcde_2.tar.bz2#db654ee11298d3463bad67445707654c -https://conda.anaconda.org/conda-forge/noarch/pycparser-2.20-pyh9f0ad1d_2.tar.bz2#aa798d50ffd182a0f6f31478c7f434f6 -https://conda.anaconda.org/conda-forge/noarch/pyke-1.1.1-pyhd8ed1ab_1004.tar.bz2#5f0236abfbb6d53826d1afed1e64f82e -https://conda.anaconda.org/conda-forge/noarch/pyparsing-2.4.7-pyh9f0ad1d_0.tar.bz2#626c4f20d5bf06dcec9cf2eaa31725c7 -https://conda.anaconda.org/conda-forge/noarch/pyshp-2.1.3-pyh44b312d_0.tar.bz2#2d1867b980785eb44b8122184d8b42a6 -https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.7-2_cp37m.tar.bz2#afff88bf9a7048da740c70aeb8cdbb82 -https://conda.anaconda.org/conda-forge/noarch/pytz-2021.1-pyhd8ed1ab_0.tar.bz2#3af2e9424d5eb0063824a3f9b850d411 -https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 -https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.1.0-pyhd8ed1ab_0.tar.bz2#f1d64c0cf0eedf655a96ccdc1573c05a -https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.2-py_0.tar.bz2#20b2eaeaeea4ef9a9a0d99770620fd09 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.2-py_0.tar.bz2#68e01cac9d38d0e717cd5c87bc3d2cc9 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.0-pyhd8ed1ab_0.tar.bz2#77dad82eb9c8c1525ff7953e0756d708 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-py_0.tar.bz2#67cd9d9c0382d37479b4d306c369a2d4 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.tar.bz2#d01180388e6d1838c3e1ad029590aa7a -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.5-pyhd8ed1ab_0.tar.bz2#60e630285f44af05767dcb7f473ee03f -https://conda.anaconda.org/conda-forge/noarch/tblib-1.7.0-pyhd8ed1ab_0.tar.bz2#3d4afc31302aa7be471feb6be048ed76 -https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 -https://conda.anaconda.org/conda-forge/noarch/toolz-0.11.1-py_0.tar.bz2#d1e66b58cb00b3817ad9f05eec098c00 -https://conda.anaconda.org/conda-forge/noarch/typing_extensions-3.10.0.0-pyha770c72_0.tar.bz2#67c0cba6533b641f28946d7c16f361c8 -https://conda.anaconda.org/conda-forge/noarch/wheel-0.36.2-pyhd3deb0d_0.tar.bz2#768bfbe026426d0e76b377997d1f2b98 
-https://conda.anaconda.org/conda-forge/noarch/zipp-3.4.1-pyhd8ed1ab_0.tar.bz2#a4fa30eb74a326092b3d8078b1f1aae1 -https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py37h89c1867_1002.tar.bz2#cf3aeeb80dbd517761019a8edcd5b108 -https://conda.anaconda.org/conda-forge/noarch/babel-2.9.1-pyh44b312d_0.tar.bz2#74136ed39bfea0832d338df1e58d013e -https://conda.anaconda.org/conda-forge/linux-64/certifi-2021.5.30-py37h89c1867_0.tar.bz2#105f18ae8597a5f4d4e3188bcb06c796 -https://conda.anaconda.org/conda-forge/linux-64/cffi-1.14.5-py37hc58025e_0.tar.bz2#e05f1fad0c52c21b6b92778d31f89cd0 -https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.0-pyhd8ed1ab_0.tar.bz2#a739adbf102868f675bf70601e0af7ea -https://conda.anaconda.org/conda-forge/linux-64/chardet-4.0.0-py37h89c1867_1.tar.bz2#f4fbd4721b80f0d6b53b3a3374914068 -https://conda.anaconda.org/conda-forge/noarch/cycler-0.10.0-py_2.tar.bz2#f6d7c7e6d8f42cbbec7e07a8d879f91c -https://conda.anaconda.org/conda-forge/linux-64/cython-0.29.23-py37hcd2ae1e_1.tar.bz2#c6a51028408bcb1a32dac34fb28b2a2e -https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.11.0-py37h5e8e339_3.tar.bz2#2e89a6f3baf5eeb13763f61ea3d0601f -https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h48d8840_2.tar.bz2#eba672c69baf366fdedd1c6f702dbb81 -https://conda.anaconda.org/conda-forge/linux-64/docutils-0.16-py37h89c1867_3.tar.bz2#3da23bcf1d502670cec18fd3a04f409b -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-2.8.1-h83ec7ef_0.tar.bz2#654935b08e8bd4a8cbf6a4253e290c04 -https://conda.anaconda.org/conda-forge/linux-64/importlib-metadata-4.5.0-py37h89c1867_0.tar.bz2#71a9d20403f28d15f7a94d0817584efa -https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.3.1-py37h2527ec5_1.tar.bz2#61149814e0ea71cb5b44881c65d25f7b -https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.8.0-mpi_mpich_hf07302c_2.tar.bz2#d76a3f327eb8e26b5ce6b042ac1abeb3 -https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.0.1-py37h5e8e339_0.tar.bz2#90ad307f6997784664de956e09ec689e -https://conda.anaconda.org/conda-forge/linux-64/mpi4py-3.0.3-py37h1e5cb63_7.tar.bz2#1c0450be22dc0fbffaabab1f415705d5 -https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.2-py37h2527ec5_1.tar.bz2#07952b04eee02d873daa311a35b27454 -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.21.0-py37h038b26d_0.tar.bz2#bfafd109a1e559dcadd211cd6bc3d298 -https://conda.anaconda.org/conda-forge/noarch/packaging-20.9-pyh44b312d_0.tar.bz2#be69a38e912054a62dc82cc3c7711a64 -https://conda.anaconda.org/conda-forge/noarch/partd-1.2.0-pyhd8ed1ab_0.tar.bz2#0c32f563d7f22e3a34c95cad8cc95651 -https://conda.anaconda.org/conda-forge/linux-64/pillow-6.2.2-py37h718be6c_0.tar.bz2#ecac4e308b87ff93d44ea5e56ab39084 -https://conda.anaconda.org/conda-forge/noarch/pockets-0.9.1-py_0.tar.bz2#1b52f0c42e8077e5a33e00fe72269364 -https://conda.anaconda.org/conda-forge/linux-64/psutil-5.8.0-py37h5e8e339_1.tar.bz2#2923250371b05e798f3732531cdb5300 -https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-4.19.18-py37hcd2ae1e_7.tar.bz2#f94e01aa4abd458b556d68fdb5f19b99 -https://conda.anaconda.org/conda-forge/linux-64/pysocks-1.7.1-py37h89c1867_3.tar.bz2#bd069d59ee91a2e26552cd7bb4c64032 -https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.1-py_0.tar.bz2#0d0150ed9c2d25817f5324108d3f7571 -https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-2.0.2-py37h5e8e339_0.tar.bz2#3f1e633378cd3c8b1ce13b3f2f5eadd7 
-https://conda.anaconda.org/conda-forge/linux-64/pyyaml-5.4.1-py37h5e8e339_0.tar.bz2#090550b9425fe9a87dc1ec7fde201633 -https://conda.anaconda.org/conda-forge/linux-64/tornado-6.1-py37h5e8e339_1.tar.bz2#92449128c4639feae48d731ef2186099 -https://conda.anaconda.org/conda-forge/noarch/zict-2.0.0-py_0.tar.bz2#4750152be22f24d695b3004c5e1712d3 -https://conda.anaconda.org/conda-forge/linux-64/brotlipy-0.7.0-py37h5e8e339_1001.tar.bz2#871eed4ba322e7b3f200956a096b34e7 -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.2.1-py37h161383b_1.tar.bz2#314ca8b00ed742f8c46a6cc68d84d90f -https://conda.anaconda.org/conda-forge/linux-64/click-8.0.1-py37h89c1867_0.tar.bz2#bb1ad97b5d8626f662b753f620c3c913 -https://conda.anaconda.org/conda-forge/linux-64/cryptography-3.4.7-py37h5d9358c_0.tar.bz2#d811fb6a96ae0cf8c0a17457a8e67ff4 -https://conda.anaconda.org/conda-forge/noarch/dask-core-2021.6.2-pyhd8ed1ab_0.tar.bz2#a5a365e004f7cb59d652254800cc40b7 -https://conda.anaconda.org/conda-forge/linux-64/editdistance-s-1.0.0-py37h2527ec5_1.tar.bz2#100918f43247cedad74f2cf8dcbda5bc -https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-4.5.0-hd8ed1ab_0.tar.bz2#37284dc55911fdf9b0b5e6fed56fb192 -https://conda.anaconda.org/conda-forge/noarch/jinja2-3.0.1-pyhd8ed1ab_0.tar.bz2#c647e77921fd3e245cdcc5b2d451a0f8 -https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py37h902c9e0_1005.tar.bz2#40db532422636dd1e980154114486a00 -https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.5.3-mpi_mpich_h196b126_4.tar.bz2#e058f42a78ea8c965cf7335e28143c59 -https://conda.anaconda.org/conda-forge/linux-64/pandas-1.2.5-py37h219a48f_0.tar.bz2#729b59cb1fe712a4251aba132b087eb6 -https://conda.anaconda.org/conda-forge/linux-64/pango-1.48.5-hb8ff022_0.tar.bz2#f4e263c4dfa15b6a97349782793d1ee7 -https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.1.1-py37h6f94858_1004.tar.bz2#42b37830a63405589fef3d13db505e7d -https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.1.1-py37h902c9e0_3.tar.bz2#104648a5a091a493046a62704eef5c49 -https://conda.anaconda.org/conda-forge/linux-64/qt-5.12.9-hda022c4_4.tar.bz2#afebab1f5049d66baaaec67d9ce893f0 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.6.3-py37h29e03ee_0.tar.bz2#a469d02f72b9cef07f4408d419b17dcc -https://conda.anaconda.org/conda-forge/linux-64/setuptools-49.6.0-py37h89c1867_3.tar.bz2#928c178bf6805b8ab71fabaa620e0234 -https://conda.anaconda.org/conda-forge/linux-64/shapely-1.7.1-py37h2d1e849_5.tar.bz2#451beb59aca4c165e68fbe8be3a37149 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-napoleon-0.7-py_0.tar.bz2#0bc25ff6f2e34af63ded59692df5f749 -https://conda.anaconda.org/conda-forge/linux-64/virtualenv-20.4.7-py37h89c1867_0.tar.bz2#50087f16f1a71581a0327956c80debc2 -https://conda.anaconda.org/conda-forge/linux-64/asv-0.4.2-py37hcd2ae1e_2.tar.bz2#a539a23d322e3976dda4af86e59b31ce -https://conda.anaconda.org/conda-forge/linux-64/bokeh-2.1.1-py37hc8dfbb8_0.tar.bz2#0927f1a093279ba797f014c5e484a58f -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.0.0-py37h6f94858_0.tar.bz2#cb9e2ae2948058a7c7b249e3a326692e -https://conda.anaconda.org/conda-forge/linux-64/distributed-2021.6.2-py37h89c1867_0.tar.bz2#c92280f3760f096f164808d8d745d0b1 -https://conda.anaconda.org/conda-forge/linux-64/esmf-8.1.1-mpi_mpich_h3dcaa78_100.tar.bz2#5b4bab1017226f2c03ba0fe02b783316 -https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h539f30e_1.tar.bz2#606777b4da3664d5c9415f5f165349fd 
-https://conda.anaconda.org/conda-forge/noarch/identify-2.2.10-pyhd8ed1ab_0.tar.bz2#1f9cd027f471e98e21d9740472b18096 -https://conda.anaconda.org/conda-forge/noarch/imagehash-4.2.0-pyhd8ed1ab_0.tar.bz2#e5a77472ae964f2835fce16355bbfe64 -https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.50.7-hc3c00ef_0.tar.bz2#63fb96444e336b3d937921223dd9a481 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.4.2-py37hdd32ed1_0.tar.bz2#ee755b80aae171058a46c5d7badd08ff -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.5.7-nompi_py37h946d57d_100.tar.bz2#217487caeb2c4cecb25f86d99cbe53b6 -https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.6.0-pyhd8ed1ab_0.tar.bz2#0941325bf48969e2b3b19d0951740950 -https://conda.anaconda.org/conda-forge/noarch/pip-21.1.2-pyhd8ed1ab_0.tar.bz2#dbd830edaffe5fc9ae6c1d425db2b5f2 -https://conda.anaconda.org/conda-forge/noarch/pygments-2.9.0-pyhd8ed1ab_0.tar.bz2#a2d9bba43c9b80a42b0ccb9afd7223c2 -https://conda.anaconda.org/conda-forge/noarch/pyopenssl-20.0.1-pyhd8ed1ab_0.tar.bz2#92371c25994d0f5d28a01c1fb75ebf86 -https://conda.anaconda.org/conda-forge/linux-64/pyqt-impl-5.12.3-py37he336c9b_7.tar.bz2#303251d6f2b9e60a0cd79480cf8507d2 -https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.19.0.post1-py37h0c48da3_0.tar.bz2#44add1e9550c8caf69ce64561ce73035 -https://conda.anaconda.org/conda-forge/noarch/dask-2021.6.2-pyhd8ed1ab_0.tar.bz2#138fd8d4293eba5dcfe8448ec54f09f2 -https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.1.1-mpi_mpich_py37hf719a8e_100.tar.bz2#d608536dd44b60da923950c60619583d -https://conda.anaconda.org/conda-forge/linux-64/graphviz-2.47.3-h85b4f2f_0.tar.bz2#099cc43ac1c5bcce50318a9fc14a1d49 -https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.2.0-pyhd8ed1ab_2.tar.bz2#ca7d092db9ebbd9a0102710e8d4b4a28 -https://conda.anaconda.org/conda-forge/linux-64/pre-commit-2.13.0-py37h89c1867_0.tar.bz2#a2ddf76626c4e7481f106fa08d5d77c5 -https://conda.anaconda.org/conda-forge/linux-64/pyqtchart-5.12-py37he336c9b_7.tar.bz2#2b1959f3a87b5ad66690340ef921323c -https://conda.anaconda.org/conda-forge/linux-64/pyqtwebengine-5.12.1-py37he336c9b_7.tar.bz2#15f5cbcafb4889bb41da2a0a0e338f2a -https://conda.anaconda.org/conda-forge/noarch/pyugrid-0.3.1-py_2.tar.bz2#7d7361886fbcf2be663fd185bf6d244d -https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.6-pyhd8ed1ab_0.tar.bz2#dea5b6d93cfbfbc2a253168ad05b3f89 -https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.12.3-py37h89c1867_7.tar.bz2#1754ec587a9ac26e9507fea7eb6bebc2 -https://conda.anaconda.org/conda-forge/noarch/requests-2.25.1-pyhd3deb0d_0.tar.bz2#ae687aba31a1c400192a86a2e993ffdc -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.4.2-py37h89c1867_0.tar.bz2#581de64cb6a7577b162e329efbcf1e4c -https://conda.anaconda.org/conda-forge/noarch/sphinx-3.5.4-pyh44b312d_0.tar.bz2#0ebc444f001f73c4f6de01057b0be392 -https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.3.3-pyhd8ed1ab_0.tar.bz2#b066335fac136c776b7a441e35c1fcb2 -https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.9.0-pyhd8ed1ab_0.tar.bz2#5ef222a3e1b5904742e376e05046692b -https://conda.anaconda.org/conda-forge/noarch/sphinx-panels-0.5.2-pyhd3deb0d_0.tar.bz2#1a871a63c4be1bd47a7aa48b7417a426 -https://conda.anaconda.org/conda-forge/noarch/sphinx_rtd_theme-0.5.2-pyhd8ed1ab_1.tar.bz2#7434e891fc767cb0d39d90751720c8ec diff --git a/requirements/ci/nox.lock/py38-linux-64.lock b/requirements/ci/nox.lock/py38-linux-64.lock deleted file mode 100644 index a6ad914b71..0000000000 --- a/requirements/ci/nox.lock/py38-linux-64.lock 
+++ /dev/null @@ -1,226 +0,0 @@ -# platform: linux-64 -# env_hash: 35b1c159ac1a6d931e48b613bf6f328700533ebf5f968ea86bcf1fd4e43c777a -@EXPLICIT -https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 -https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2021.5.30-ha878542_0.tar.bz2#6a777890e94194dc94a29a76d2a7e721 -https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 -https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 -https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb -https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 -https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.35.1-hea4e1c9_2.tar.bz2#83610dba766a186bdc7a116053b782a4 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-9.3.0-hff62375_19.tar.bz2#c2d8da3cb171e4aa642d20c6e4e42a04 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-9.3.0-h6de172a_19.tar.bz2#cd9a24a8dde03ec0cf0e603b0bea85a1 -https://conda.anaconda.org/conda-forge/linux-64/mpi-1.0-mpich.tar.bz2#c1fcff3417b5a22bbc4cf6e8c23648cf -https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.25-ha770c72_2.tar.bz2#b1ba065c6d2b9468035472a9d63e5b08 -https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-9.3.0-hff62375_19.tar.bz2#aea379bd68fdcdf9499fa1453f852ac1 -https://conda.anaconda.org/conda-forge/linux-64/libgomp-9.3.0-h2828fa1_19.tar.bz2#ab0a307912033126da02507b59e79ec9 -https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-1_gnu.tar.bz2#561e277319a41d4f24f5c05a9ef63c04 -https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-9.3.0-h2828fa1_19.tar.bz2#9d5cdfc51476ee4dcdd96ed2dca3f943 -https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.3-h516909a_0.tar.bz2#1378b88874f42ac31b2f8e4f6975cb7b -https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.17.1-h7f98852_1.tar.bz2#ed1dc233ed5e3eaa9bfbaac64d130c5e -https://conda.anaconda.org/conda-forge/linux-64/expat-2.4.1-h9c3ff4c_0.tar.bz2#16054ef3cb3ec5d8d29d08772662f65d -https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 -https://conda.anaconda.org/conda-forge/linux-64/geos-3.9.1-h9c3ff4c_2.tar.bz2#b9a6d9422aed3ad84ec6ccee9bfcaa0f -https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h36c2ea0_2.tar.bz2#626e68ae9cc5912d6adb79d318cf962d -https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 -https://conda.anaconda.org/conda-forge/linux-64/icu-68.1-h58526e2_0.tar.bz2#fc7a4271dc2a7f4fd78cd63695baf7c3 -https://conda.anaconda.org/conda-forge/linux-64/jbig-2.1-h7f98852_2003.tar.bz2#1aa0cee79792fa97b7ff4545110b60bf -https://conda.anaconda.org/conda-forge/linux-64/jpeg-9d-h36c2ea0_0.tar.bz2#ea02ce6037dbe81803ae6123e5ba1568 -https://conda.anaconda.org/conda-forge/linux-64/lerc-2.2.1-h9c3ff4c_0.tar.bz2#ea833dcaeb9e7ac4fac521f1a7abec82 
-https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.7-h7f98852_5.tar.bz2#10e242842cd30c59c12d79371dc0f583 -https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 -https://conda.anaconda.org/conda-forge/linux-64/libffi-3.3-h58526e2_2.tar.bz2#665369991d8dd290ac5ee92fce3e6bf5 -https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.16-h516909a_0.tar.bz2#5c0f338a513a2943c659ae619fca9211 -https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d -https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.15-pthreads_h8fe5266_1.tar.bz2#bb5527a16584426a897f22643d9a36a6 -https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f -https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.6-h58526e2_1007.tar.bz2#7f6569a0c2f27acb8fc90600b382e544 -https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.32.1-h7f98852_1000.tar.bz2#772d69f030955d9646d3d0eaf21d859d -https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.2.0-h7f98852_2.tar.bz2#fb63a035a3b552c88a30d84b89ebf4c4 -https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.3-h9c3ff4c_0.tar.bz2#4eb64ee0d5cd43096ffcf843c76b05d4 -https://conda.anaconda.org/conda-forge/linux-64/mpich-3.4.2-h846660c_100.tar.bz2#0868d02349fc7e128d4bdc515b58dd7e -https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.2-h58526e2_4.tar.bz2#509f2a21c4a09214cd737a480dfd80c9 -https://conda.anaconda.org/conda-forge/linux-64/nspr-4.30-h9c3ff4c_0.tar.bz2#e6dc1f8f6e0bcebe8e3d8a5bca258dbe -https://conda.anaconda.org/conda-forge/linux-64/openssl-1.1.1k-h7f98852_0.tar.bz2#07fae2cb088379c8441e0f3ffa1f4025 -https://conda.anaconda.org/conda-forge/linux-64/pcre-8.45-h9c3ff4c_0.tar.bz2#c05d1820a6d34ff07aaaab7a9b7eddaa -https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 -https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 -https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a -https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.0.10-h7f98852_0.tar.bz2#d6b0b50b49eccfe0be0373be628be0f3 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.9-h7f98852_0.tar.bz2#bf6f803a544f26ebbdc3bfff272eb179 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2#be93aabceefa2fac576e971aef407908 -https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2#06feff3d2634e3097ce2fe681474b534 -https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h7f98852_1002.tar.bz2#1e15f6ad85a7d743a2ac68dae6c82b98 -https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2#b4a4381d54784606820704f7b5f05a15 -https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.0-h7f98852_3.tar.bz2#52402c791f35e414e704b7a113f99605 -https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.5-h516909a_1.tar.bz2#33f601066901f3e1a85af3522a8113f9 -https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h516909a_0.tar.bz2#03a530e925414902547cf48da7756db8 -https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.11-h516909a_1010.tar.bz2#339cc5584e6d26bc73a875ba900028c3 
-https://conda.anaconda.org/conda-forge/linux-64/gettext-0.19.8.1-h0b5b191_1005.tar.bz2#ff6f69b593a9e74c0e6b61908ac513fa -https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h10796ff_3.tar.bz2#21a8d66dc17f065023b33145c42652fe -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-9_openblas.tar.bz2#5f08755e98b2a43ca68124e629a5a0cb -https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 -https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.10-hcdb4288_3.tar.bz2#d8f51405997093ff1799ded7650439c4 -https://conda.anaconda.org/conda-forge/linux-64/libllvm11-11.1.0-hf817b99_2.tar.bz2#646fa2f7c60b69ee8f918668e9c2fd31 -https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.43.0-h812cca2_0.tar.bz2#1867d1e9658596b3fac8847a7702eef4 -https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.37-h21135ba_2.tar.bz2#b6acf807307d033d4b7e758b4f44b036 -https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.9.0-ha56f1ee_6.tar.bz2#f0dfb86444df325e599dbc3f4c0a3f5b -https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 -https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.13-h7f98852_1003.tar.bz2#a9371e9e40aded194dcba1447606c9a1 -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.9.12-h72842e0_0.tar.bz2#bd14fdf5b9ee5568056a40a6a2f41866 -https://conda.anaconda.org/conda-forge/linux-64/libzip-1.8.0-h4de3113_0.tar.bz2#2d1b63c574f3e11157a07313e58ba7af -https://conda.anaconda.org/conda-forge/linux-64/readline-8.1-h46c0cb4_0.tar.bz2#5788de3c8d7a7d64ac56c784c4ef48e6 -https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.10-h21135ba_1.tar.bz2#c647f70aa7e3d4cc4e029cc1c9a99953 -https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.27.27-h975c496_1.tar.bz2#e663bd5dbc8cc4c1647d9f51cf25872c -https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.3-hd9c2040_1000.tar.bz2#9e856f78d5c80d5a78f61e72d1d473a3 -https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.0-ha95c52a_0.tar.bz2#b56f94865e2de36abf054e7bfa499034 -https://conda.anaconda.org/conda-forge/linux-64/freetype-2.10.4-h0708190_1.tar.bz2#4a06f2ac2e5bfae7b6b245171c3f07aa -https://conda.anaconda.org/conda-forge/linux-64/krb5-1.19.1-hcc1bbae_0.tar.bz2#59b0695a515a6c54d45463dbf208ae38 -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-9_openblas.tar.bz2#edee85b4f83376ceae81e0975b8bffa2 -https://conda.anaconda.org/conda-forge/linux-64/libclang-11.1.0-default_ha53f305_1.tar.bz2#b9b71585ca4fcb5d442c5a9df5dd7e98 -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.68.3-h3e27bee_0.tar.bz2#99416a3287216de097d503b827ad0bde -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-9_openblas.tar.bz2#572d84ab07962986f6dd8e4637a475ca -https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.3.0-hf544144_1.tar.bz2#a65a4158716bd7d95bfa69bcfd83081c -https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.0.3-he3ba5ed_0.tar.bz2#f9dbabc7e01c459ed7a1d1d64b206e9b -https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.25-hfa10184_2.tar.bz2#5a35fdd2da4c2d5fdf20575d39c232e5 -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.36.0-h9cd32fc_0.tar.bz2#d5bbac924cbda57469f43448d5236a50 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.7.2-h7f98852_0.tar.bz2#12a61e640b8894504326aadafccbb790 -https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.36.0-h3371d22_4.tar.bz2#661e1ed5d92552785d9f8c781ce68685 
-https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.13.1-hba837de_1005.tar.bz2#fd3611672eb91bc9d24fd6fb970037eb -https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.6-h04a7f16_0.tar.bz2#b24a1e18325a6e8f8b6b4a2ec5860ce2 -https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.68.3-h9c3ff4c_0.tar.bz2#2e9275303dd09a2e245faf31770a1416 -https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.18.4-h76c114f_2.tar.bz2#5db765d4974fa89f64c1544eb2a552cb -https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h64030ff_2.tar.bz2#112eb9b5b93f0c02e59aea4fd1967363 -https://conda.anaconda.org/conda-forge/linux-64/libcurl-7.77.0-h2574ce0_0.tar.bz2#05cf8dca8408b5f1ffcc5e2d5a7c5da2 -https://conda.anaconda.org/conda-forge/linux-64/libpq-13.3-hd57d9b9_0.tar.bz2#66ef2cacc483205b7d303f7b02601c3b -https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.0-h3452ae3_0.tar.bz2#8f4e19a8988c38feec7db41bcd0bf0d0 -https://conda.anaconda.org/conda-forge/linux-64/nss-3.67-hb5efdd6_0.tar.bz2#3f2a4bc7d5fded1327ff1b8c61faae53 -https://conda.anaconda.org/conda-forge/linux-64/python-3.8.10-h49503c6_1_cpython.tar.bz2#69f7d6ef1f00c3a109b1b06279e6d6a9 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h7f98852_1.tar.bz2#536cc5db4d0a3ba0630541aec064b5e4 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb -https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.12-py_0.tar.bz2#2489a97287f90176ecdc3ca982b4b0a0 -https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2#5f095bc6454094e96f146491fd03633b -https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-h6cf1ce9_1008.tar.bz2#a43fb47d15e116f8be4be7e6b17ab59f -https://conda.anaconda.org/conda-forge/noarch/cloudpickle-1.6.0-py_0.tar.bz2#76d764d8881719e305f6fa368dc2b65e -https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.4-pyh9f0ad1d_0.tar.bz2#c08b4c1326b880ed44f3ffb04803332f -https://conda.anaconda.org/conda-forge/linux-64/curl-7.77.0-hea6ffbf_0.tar.bz2#7d1168349d6fba67ae1fdf61970b83e1 -https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.2-pyhd8ed1ab_0.tar.bz2#ae8b866c376568b0342ae2c9b68f1e65 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.0.12-pyh9f0ad1d_0.tar.bz2#7544ed05bbbe9bb687bc9bcbe4d6cb46 -https://conda.anaconda.org/conda-forge/noarch/fsspec-2021.6.1-pyhd8ed1ab_0.tar.bz2#b8dca3cd859c8a849042af6db1cbedca -https://conda.anaconda.org/conda-forge/linux-64/glib-2.68.3-h9c3ff4c_0.tar.bz2#90e989058c8b42e3ddee1560c534313b -https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.18.4-hf529b03_2.tar.bz2#526fadaa13ec264cb919436953bc2766 -https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.10.6-mpi_mpich_h996c276_1014.tar.bz2#6af2e2e4dfb0ef36c35042cd69a1599d -https://conda.anaconda.org/conda-forge/noarch/heapdict-1.0.1-py_0.tar.bz2#77242bfb1e74a627fb06319b5a2d3b95 -https://conda.anaconda.org/conda-forge/noarch/idna-2.10-pyh9f0ad1d_0.tar.bz2#f95a12b4f435aae6680fe55ae2eb1b06 -https://conda.anaconda.org/conda-forge/noarch/imagesize-1.2.0-py_0.tar.bz2#5879bd2c4b399a5072468e5fe587bf1b -https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.3.0-pyh9f0ad1d_0.tar.bz2#e4a33192da1a6dc4967ba18c6c765945 -https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.2-h78a0170_0.tar.bz2#ac0c23e6f3bbb61569781f00b5666f97 -https://conda.anaconda.org/conda-forge/noarch/locket-0.2.0-py_2.tar.bz2#709e8671651c7ec3d1ad07800339ff1d 
-https://conda.anaconda.org/conda-forge/noarch/nose-1.3.7-py_1006.tar.bz2#382019d5f8e9362ef6f60a8d4e7bce8f -https://conda.anaconda.org/conda-forge/noarch/olefile-0.46-pyh9f0ad1d_1.tar.bz2#0b2e68acc8c78c8cc392b90983481f58 -https://conda.anaconda.org/conda-forge/linux-64/proj-7.2.0-h277dcde_2.tar.bz2#db654ee11298d3463bad67445707654c -https://conda.anaconda.org/conda-forge/noarch/pycparser-2.20-pyh9f0ad1d_2.tar.bz2#aa798d50ffd182a0f6f31478c7f434f6 -https://conda.anaconda.org/conda-forge/noarch/pyke-1.1.1-pyhd8ed1ab_1004.tar.bz2#5f0236abfbb6d53826d1afed1e64f82e -https://conda.anaconda.org/conda-forge/noarch/pyparsing-2.4.7-pyh9f0ad1d_0.tar.bz2#626c4f20d5bf06dcec9cf2eaa31725c7 -https://conda.anaconda.org/conda-forge/noarch/pyshp-2.1.3-pyh44b312d_0.tar.bz2#2d1867b980785eb44b8122184d8b42a6 -https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.8-2_cp38.tar.bz2#bfbb29d517281e78ac53e48d21e6e860 -https://conda.anaconda.org/conda-forge/noarch/pytz-2021.1-pyhd8ed1ab_0.tar.bz2#3af2e9424d5eb0063824a3f9b850d411 -https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 -https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.1.0-pyhd8ed1ab_0.tar.bz2#f1d64c0cf0eedf655a96ccdc1573c05a -https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.2-py_0.tar.bz2#20b2eaeaeea4ef9a9a0d99770620fd09 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.2-py_0.tar.bz2#68e01cac9d38d0e717cd5c87bc3d2cc9 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.0-pyhd8ed1ab_0.tar.bz2#77dad82eb9c8c1525ff7953e0756d708 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-py_0.tar.bz2#67cd9d9c0382d37479b4d306c369a2d4 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.tar.bz2#d01180388e6d1838c3e1ad029590aa7a -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.5-pyhd8ed1ab_0.tar.bz2#60e630285f44af05767dcb7f473ee03f -https://conda.anaconda.org/conda-forge/noarch/tblib-1.7.0-pyhd8ed1ab_0.tar.bz2#3d4afc31302aa7be471feb6be048ed76 -https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 -https://conda.anaconda.org/conda-forge/noarch/toolz-0.11.1-py_0.tar.bz2#d1e66b58cb00b3817ad9f05eec098c00 -https://conda.anaconda.org/conda-forge/noarch/wheel-0.36.2-pyhd3deb0d_0.tar.bz2#768bfbe026426d0e76b377997d1f2b98 -https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py38h578d9bd_1002.tar.bz2#2b2207e2c8a05fc0bc5b62fc32c355e6 -https://conda.anaconda.org/conda-forge/noarch/babel-2.9.1-pyh44b312d_0.tar.bz2#74136ed39bfea0832d338df1e58d013e -https://conda.anaconda.org/conda-forge/linux-64/certifi-2021.5.30-py38h578d9bd_0.tar.bz2#a2e14464711f8e76010cd7e0c49bc4ae -https://conda.anaconda.org/conda-forge/linux-64/cffi-1.14.5-py38ha65f79e_0.tar.bz2#386057f231a571b75bfa7307c9acd5f6 -https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.0-pyhd8ed1ab_0.tar.bz2#a739adbf102868f675bf70601e0af7ea -https://conda.anaconda.org/conda-forge/linux-64/chardet-4.0.0-py38h578d9bd_1.tar.bz2#9294a5e2c7545a2f67ac348aadd53344 -https://conda.anaconda.org/conda-forge/linux-64/click-8.0.1-py38h578d9bd_0.tar.bz2#45426acde32f0ddd94dcee3478fd13e3 -https://conda.anaconda.org/conda-forge/noarch/cycler-0.10.0-py_2.tar.bz2#f6d7c7e6d8f42cbbec7e07a8d879f91c 
-https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.11.0-py38h497a2fe_3.tar.bz2#45568bae22c3825f22b631101ecbad35 -https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h48d8840_2.tar.bz2#eba672c69baf366fdedd1c6f702dbb81 -https://conda.anaconda.org/conda-forge/linux-64/docutils-0.15.2-py38h578d9bd_2.tar.bz2#a62e348e72a6122a17d2146d8c5eb4e1 -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-2.8.1-h83ec7ef_0.tar.bz2#654935b08e8bd4a8cbf6a4253e290c04 -https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.3.1-py38h1fd1430_1.tar.bz2#01488c80daae318ed5c17e7bb12af64e -https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.8.0-mpi_mpich_hf07302c_2.tar.bz2#d76a3f327eb8e26b5ce6b042ac1abeb3 -https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.0.1-py38h497a2fe_0.tar.bz2#d075babffd68330d81b0488a45435698 -https://conda.anaconda.org/conda-forge/linux-64/mpi4py-3.0.3-py38he865349_7.tar.bz2#afbbb1e0ce578e537b2ec82563988417 -https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.2-py38h1fd1430_1.tar.bz2#5854c568e0d341313fb0a6487f1c687e -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.21.0-py38h9894fe3_0.tar.bz2#10aebb0451bda480ef14a25657c576f3 -https://conda.anaconda.org/conda-forge/noarch/packaging-20.9-pyh44b312d_0.tar.bz2#be69a38e912054a62dc82cc3c7711a64 -https://conda.anaconda.org/conda-forge/noarch/partd-1.2.0-pyhd8ed1ab_0.tar.bz2#0c32f563d7f22e3a34c95cad8cc95651 -https://conda.anaconda.org/conda-forge/linux-64/pillow-6.2.1-py38hd70f55b_1.tar.bz2#80d719bee2b77a106b199150c0829107 -https://conda.anaconda.org/conda-forge/noarch/pockets-0.9.1-py_0.tar.bz2#1b52f0c42e8077e5a33e00fe72269364 -https://conda.anaconda.org/conda-forge/linux-64/psutil-5.8.0-py38h497a2fe_1.tar.bz2#3c465545aa3cec37f8f1341546677956 -https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-4.19.18-py38h709712a_7.tar.bz2#e012838bbbe92f6a458c2584634830f1 -https://conda.anaconda.org/conda-forge/linux-64/pysocks-1.7.1-py38h578d9bd_3.tar.bz2#8284bab4783fd6fdd11b695958945614 -https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.1-py_0.tar.bz2#0d0150ed9c2d25817f5324108d3f7571 -https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-2.0.2-py38h497a2fe_0.tar.bz2#27b37e3f79205080b573442445ed727b -https://conda.anaconda.org/conda-forge/linux-64/pyyaml-5.4.1-py38h497a2fe_0.tar.bz2#36d6e06148013694eb943576cd305f67 -https://conda.anaconda.org/conda-forge/linux-64/tornado-6.1-py38h497a2fe_1.tar.bz2#e772c8383768280af283e814e2126663 -https://conda.anaconda.org/conda-forge/linux-64/virtualenv-20.4.7-py38h578d9bd_0.tar.bz2#24b5e0295c43de15a51afb00f93a41de -https://conda.anaconda.org/conda-forge/noarch/zict-2.0.0-py_0.tar.bz2#4750152be22f24d695b3004c5e1712d3 -https://conda.anaconda.org/conda-forge/linux-64/brotlipy-0.7.0-py38h497a2fe_1001.tar.bz2#56753dd777a6517b34966ddcb39af734 -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.2.1-py38hab2c0dc_1.tar.bz2#777186ded2d850f3eab4ce7131c6c17c -https://conda.anaconda.org/conda-forge/linux-64/cryptography-3.4.7-py38ha5dfef3_0.tar.bz2#a8b014aba670157256dabdc885f71af4 -https://conda.anaconda.org/conda-forge/noarch/dask-core-2021.6.2-pyhd8ed1ab_0.tar.bz2#a5a365e004f7cb59d652254800cc40b7 -https://conda.anaconda.org/conda-forge/linux-64/editdistance-s-1.0.0-py38h1fd1430_1.tar.bz2#03bbd69539712a691b0a43bd4a49976e -https://conda.anaconda.org/conda-forge/noarch/jinja2-3.0.1-pyhd8ed1ab_0.tar.bz2#c647e77921fd3e245cdcc5b2d451a0f8 
-https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py38h5c078b8_1005.tar.bz2#d318a411c4cb595d5adb60ec7b4a46f0 -https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.5.3-mpi_mpich_h196b126_4.tar.bz2#e058f42a78ea8c965cf7335e28143c59 -https://conda.anaconda.org/conda-forge/linux-64/pandas-1.2.5-py38h1abd341_0.tar.bz2#b7c0ddb0b4a016268bd915d8fb55693f -https://conda.anaconda.org/conda-forge/linux-64/pango-1.48.5-hb8ff022_0.tar.bz2#f4e263c4dfa15b6a97349782793d1ee7 -https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.2.post0-py38hb5d20a5_0.tar.bz2#cc6852249c01884469560082943b689f -https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.1.1-py38h5c078b8_3.tar.bz2#dafeef887e68bd18ec84681747ca0fd5 -https://conda.anaconda.org/conda-forge/linux-64/qt-5.12.9-hda022c4_4.tar.bz2#afebab1f5049d66baaaec67d9ce893f0 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.6.3-py38h7b17777_0.tar.bz2#8055079ed82e1ada1cc4714c26d04802 -https://conda.anaconda.org/conda-forge/linux-64/setuptools-49.6.0-py38h578d9bd_3.tar.bz2#59c561cd1be0db9cf1c83f7d7cc74f4d -https://conda.anaconda.org/conda-forge/linux-64/shapely-1.7.1-py38haeee4fe_5.tar.bz2#2e633d8e2257f3c0e465c858ce2ddbc6 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-napoleon-0.7-py_0.tar.bz2#0bc25ff6f2e34af63ded59692df5f749 -https://conda.anaconda.org/conda-forge/linux-64/asv-0.4.2-py38h709712a_2.tar.bz2#4659f315fc42e671606fbcd1b9234f75 -https://conda.anaconda.org/conda-forge/linux-64/bokeh-1.4.0-py38h32f6830_1.tar.bz2#7074fc3ef551c1aa1e10393436de021c -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.0.0-py38hb5d20a5_0.tar.bz2#4de86c142bd3846d95d11b32567ca503 -https://conda.anaconda.org/conda-forge/linux-64/distributed-2021.6.2-py38h578d9bd_0.tar.bz2#87cccb37c2a1e0ec30d3715431dee080 -https://conda.anaconda.org/conda-forge/linux-64/esmf-8.1.1-mpi_mpich_h3dcaa78_100.tar.bz2#5b4bab1017226f2c03ba0fe02b783316 -https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h539f30e_1.tar.bz2#606777b4da3664d5c9415f5f165349fd -https://conda.anaconda.org/conda-forge/noarch/identify-2.2.10-pyhd8ed1ab_0.tar.bz2#1f9cd027f471e98e21d9740472b18096 -https://conda.anaconda.org/conda-forge/noarch/imagehash-4.2.0-pyhd8ed1ab_0.tar.bz2#e5a77472ae964f2835fce16355bbfe64 -https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.50.7-hc3c00ef_0.tar.bz2#63fb96444e336b3d937921223dd9a481 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.4.2-py38hcc49a3a_0.tar.bz2#4bfb6818a1fce6d4129fdf121f788505 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.5.7-nompi_py38h5e9db54_100.tar.bz2#5f86dd7381e37db378068abd7707cd57 -https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.6.0-pyhd8ed1ab_0.tar.bz2#0941325bf48969e2b3b19d0951740950 -https://conda.anaconda.org/conda-forge/noarch/pip-21.1.2-pyhd8ed1ab_0.tar.bz2#dbd830edaffe5fc9ae6c1d425db2b5f2 -https://conda.anaconda.org/conda-forge/noarch/pygments-2.9.0-pyhd8ed1ab_0.tar.bz2#a2d9bba43c9b80a42b0ccb9afd7223c2 -https://conda.anaconda.org/conda-forge/noarch/pyopenssl-20.0.1-pyhd8ed1ab_0.tar.bz2#92371c25994d0f5d28a01c1fb75ebf86 -https://conda.anaconda.org/conda-forge/linux-64/pyqt-impl-5.12.3-py38h7400c14_7.tar.bz2#8fe28c949b01e3d69c2b357b5abf3916 -https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.19.0.post1-py38hc9c980b_0.tar.bz2#65e97172e139d3465895eb07a1fd52f2 -https://conda.anaconda.org/conda-forge/noarch/dask-2021.6.2-pyhd8ed1ab_0.tar.bz2#138fd8d4293eba5dcfe8448ec54f09f2 
-https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.1.1-mpi_mpich_py38h7f78e9f_100.tar.bz2#ce0ac0d6f5e6c5e7e0c613b08b3a0960 -https://conda.anaconda.org/conda-forge/linux-64/graphviz-2.47.3-h85b4f2f_0.tar.bz2#099cc43ac1c5bcce50318a9fc14a1d49 -https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.2.0-pyhd8ed1ab_2.tar.bz2#ca7d092db9ebbd9a0102710e8d4b4a28 -https://conda.anaconda.org/conda-forge/linux-64/pre-commit-2.13.0-py38h578d9bd_0.tar.bz2#1013dff06f574377c64f11efa7e2c016 -https://conda.anaconda.org/conda-forge/linux-64/pyqtchart-5.12-py38h7400c14_7.tar.bz2#3003444b4f41742a33b7afdeb3260cbc -https://conda.anaconda.org/conda-forge/linux-64/pyqtwebengine-5.12.1-py38h7400c14_7.tar.bz2#1c17944e118b314ff4d0bfc05f03a5e1 -https://conda.anaconda.org/conda-forge/noarch/pyugrid-0.3.1-py_2.tar.bz2#7d7361886fbcf2be663fd185bf6d244d -https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.6-pyhd8ed1ab_0.tar.bz2#dea5b6d93cfbfbc2a253168ad05b3f89 -https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.12.3-py38h578d9bd_7.tar.bz2#7166890c160d0441f59973a40b74f6e5 -https://conda.anaconda.org/conda-forge/noarch/requests-2.25.1-pyhd3deb0d_0.tar.bz2#ae687aba31a1c400192a86a2e993ffdc -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.4.2-py38h578d9bd_0.tar.bz2#82aa0479b2189ab97f9e70b90d7ec866 -https://conda.anaconda.org/conda-forge/noarch/sphinx-3.5.4-pyh44b312d_0.tar.bz2#0ebc444f001f73c4f6de01057b0be392 -https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.3.3-pyhd8ed1ab_0.tar.bz2#b066335fac136c776b7a441e35c1fcb2 -https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.9.0-pyhd8ed1ab_0.tar.bz2#5ef222a3e1b5904742e376e05046692b -https://conda.anaconda.org/conda-forge/noarch/sphinx-panels-0.5.2-pyhd3deb0d_0.tar.bz2#1a871a63c4be1bd47a7aa48b7417a426 -https://conda.anaconda.org/conda-forge/noarch/sphinx_rtd_theme-0.5.2-pyhd8ed1ab_1.tar.bz2#7434e891fc767cb0d39d90751720c8ec diff --git a/requirements/ci/py37.yml b/requirements/ci/py37.yml index fb9f5b38f8..fac21560a4 100644 --- a/requirements/ci/py37.yml +++ b/requirements/ci/py37.yml @@ -8,7 +8,6 @@ dependencies: # Setup dependencies. - setuptools>=40.8.0 - - pyke # Core dependencies. - cartopy>=0.18 diff --git a/requirements/ci/py38.yml b/requirements/ci/py38.yml index ed81a7aaa8..4be43fdba6 100644 --- a/requirements/ci/py38.yml +++ b/requirements/ci/py38.yml @@ -8,7 +8,6 @@ dependencies: # Setup dependencies. - setuptools>=40.8.0 - - pyke # Core dependencies. - cartopy>=0.18 diff --git a/setup.cfg b/setup.cfg index b169303498..73714e0a5f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -55,7 +55,6 @@ install_requires = netcdf4 numpy>=1.14 scipy - scitools-pyke xxhash packages = find: package_dir = diff --git a/setup.py b/setup.py index 6ecc956430..f48f3fe25a 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,6 @@ import os from shutil import copyfile import sys -import textwrap from setuptools import Command, setup from setuptools.command.build_py import build_py @@ -63,50 +62,6 @@ def run(self): os.remove(compiled_path) -def compile_pyke_rules(cmd, directory): - # Call out to the python executable to pre-compile the Pyke rules. - # Significant effort was put in to trying to get these to compile - # within this build process but there was no obvious way of finding - # a workaround to the issue presented in - # https://github.com/SciTools/iris/issues/2481. 
- - shelled_code = textwrap.dedent( - """\ - - import os - - # Monkey patch the load method to avoid "ModuleNotFoundError: No module - # named 'iris.fileformats._pyke_rules.compiled_krb'". In this instance - # we simply don't want the knowledge engine, so we turn the load method - # into a no-op. - from pyke.target_pkg import target_pkg - target_pkg.load = lambda *args, **kwargs: None - - # Compile the rules by hand, without importing iris. That way we can - # avoid the need for all of iris' dependencies being installed. - os.chdir(os.path.join('{bld_dir}', 'iris', 'fileformats', '_pyke_rules')) - - # Import pyke *after* changing directory. Without this we get the compiled - # rules in the wrong place. Identified in - # https://github.com/SciTools/iris/pull/2891#issuecomment-341404187 - from pyke import knowledge_engine - knowledge_engine.engine('') - - """.format( - bld_dir=directory - ) - ).split("\n") - shelled_code = "; ".join( - [ - line - for line in shelled_code - if not line.strip().startswith("#") and line.strip() - ] - ) - args = [sys.executable, "-c", shelled_code] - cmd.spawn(args) - - def copy_copyright(cmd, directory): # Copy the COPYRIGHT information into the package root iris_build_dir = os.path.join(directory, "iris") @@ -154,20 +109,13 @@ def run(self): custom_commands = { "test": SetupTestRunner, - "develop": custom_cmd(develop_cmd, [build_std_names, compile_pyke_rules]), - "build_py": custom_cmd( - build_py, [build_std_names, compile_pyke_rules, copy_copyright] - ), + "develop": custom_cmd(develop_cmd, [build_std_names]), + "build_py": custom_cmd(build_py, [build_std_names, copy_copyright]), "std_names": custom_cmd( BaseCommand, [build_std_names], help_doc="generate CF standard name module", ), - "pyke_rules": custom_cmd( - BaseCommand, - [compile_pyke_rules], - help_doc="compile CF-NetCDF loader rules", - ), "clean_source": CleanSource, } From b0ac323ada52a1e9f98378cd927e2ed2a507c2c5 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 23 Jun 2021 16:12:10 +0100 Subject: [PATCH 44/53] Ensure a definite ordering for formula-root processing. --- lib/iris/fileformats/netcdf.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/lib/iris/fileformats/netcdf.py b/lib/iris/fileformats/netcdf.py index df62027350..175f3dc653 100644 --- a/lib/iris/fileformats/netcdf.py +++ b/lib/iris/fileformats/netcdf.py @@ -462,19 +462,25 @@ def _assert_case_specific_facts(engine, cf, cf_group): # Assert facts for CF formula terms associated with the cf_group # of the CF data variable. - formula_root = set() + + # Collect varnames of formula-root variables as we go. + # NOTE: use dictionary keys as an 'OrderedDict' + # - see: https://stackoverflow.com/a/53657523/2615050 + # This is to ensure that we can handle the resulting facts in a definite + # order, as using a 'set' led to indeterminate results. + formula_root = {} for cf_var in cf.cf_group.formula_terms.values(): for cf_root, cf_term in cf_var.cf_terms_by_root.items(): # Only assert this fact if the formula root variable is # defined in the CF group of the CF data variable. 
if cf_root in cf_group: - formula_root.add(cf_root) + formula_root[cf_root] = True engine.add_case_specific_fact( "formula_term", (cf_var.cf_name, cf_root, cf_term), ) - for cf_root in formula_root: + for cf_root in formula_root.keys(): engine.add_case_specific_fact("formula_root", (cf_root,)) From c26db97f35812cf5e78e02b952b3dbf5eff32751 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 23 Jun 2021 17:25:57 +0100 Subject: [PATCH 45/53] Odd corrections to comments. --- lib/iris/fileformats/_nc_load_rules/engine.py | 4 ++-- .../fileformats/_nc_load_rules/helpers.py | 6 +++-- lib/iris/fileformats/netcdf.py | 6 +++++ .../nc_load_rules/actions/__init__.py | 24 +++++++------------ .../actions/test__time_coords.py | 4 ++-- .../tests/unit/fileformats/netcdf/__init__.py | 2 +- 6 files changed, 23 insertions(+), 23 deletions(-) diff --git a/lib/iris/fileformats/_nc_load_rules/engine.py b/lib/iris/fileformats/_nc_load_rules/engine.py index ee2cc1bb29..497c2a12c9 100644 --- a/lib/iris/fileformats/_nc_load_rules/engine.py +++ b/lib/iris/fileformats/_nc_load_rules/engine.py @@ -68,7 +68,7 @@ class Engine: Provides just enough API so that the existing code in :mod:`iris.fileformats.netcdf` can interface with our new rules functions. - A list of possible fact-arglists is store, for each of a set of fact-names + A list of possible fact-arglists is stored, for each of a set of fact-names (which are strings). Each fact-argslist is represented by a tuple of values -- at present, in practice, those are all strings too. @@ -88,7 +88,7 @@ def activate(self): Run all the translation rules to produce a single output cube. This implicitly references the output variable for this operation, - set by engine.cf_var (the variable name). + set by engine.cf_var (a CFDataVariable). The rules operation itself is coded elsewhere, in :mod:`iris.fileformats.netcdf._nc_load_rules.actions`. diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py index 458e130ac3..a5b507d583 100644 --- a/lib/iris/fileformats/_nc_load_rules/helpers.py +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -7,8 +7,10 @@ All the pure-Python 'helper' functions which were previously included in the Pyke rules database 'fc_rules_cf.krb'. -Initially these haven't changed. -The new rules approach is still calling most of them. +The 'action' routines now call these, as the rules used to do. +They have not changed, **except** that the 'build_coordinate_system' routine +acquired an extra initial 'engine' argument, purely for consistency with other +build routines, and which it does not use. """ diff --git a/lib/iris/fileformats/netcdf.py b/lib/iris/fileformats/netcdf.py index 175f3dc653..4c69a3a002 100644 --- a/lib/iris/fileformats/netcdf.py +++ b/lib/iris/fileformats/netcdf.py @@ -549,6 +549,12 @@ def _get_cf_var_data(cf_var, filename): class OrderedAddableList(list): + # Used purely in actions debugging, to accumulate a record of which actions + # were activated. + # It replaces a set, so as to record the ordering of operations, with + # possible repeats, and it also numbers the entries. + # Actions routines invoke the 'add' method, which thus effectively converts + # a set.add into a list.append. 
def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self._n_add = 0 diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py index 678195c6a1..80b3a0dbef 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py @@ -49,25 +49,18 @@ class Mixin__nc_load_actions: actual Test_XXX class which also inherits unittest.TestCase. Testcases are manufactured by the '_make_testcase_cdl' method. - These are based on a 'standard simple latlon grid' example. - Various kwargs control variations on this. - The 'run_testcase' method takes the '_make_testcase_cdl' kwargs and makes - a result cube (by: producing cdl, converting to netcdf, and loading). - - The 'check_result' method performs various checks on the result, with - kwargs controlling the expected properties to be tested against. - This usage is *also* based on the 'standard simple latlon grid' example, - the kwargs specify expected differences from that. - - Can also test with either the Pyke(rules) or non-Pyke (actions) - implementations (for now). + a result cube (by: producing cdl, converting to netcdf, and loading the + 'phenom' variable only). + Likewise, a generalised 'check_result' method will be used to perform result + checking. + Both '_make_testcase_cdl' and 'check_result' are not defined here : They + are to be variously implemented by the inheritors. """ # "global" test setting : whether to output various debug info - # TODO: ?possibly? remove when development is complete - debug = False + debug = True # whether to perform action in both ways and compare results. @classmethod @@ -98,8 +91,7 @@ def load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): # Simulate the inner part of the file reading process. cf = CFReader(nc_path) - # Grab a data variable : FOR NOW, should be only 1 - cf_var = list(cf.cf_group.data_variables.values())[0] + # Grab a data variable : FOR NOW always grab the 'phenom' variable. cf_var = cf.cf_group.data_variables["phenom"] engine = iris.fileformats.netcdf._actions_engine() diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py index 32f6dafc67..f0be79d18b 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py @@ -54,11 +54,11 @@ def _make_testcase_cdl( opt_t = None opt_p = None if time_opts is not None: - # Replace 'True' with an options dict for 'time' options + # Convert a non-null kwarg into an options dict for 'time' options opt_t = Opts(**_COORD_OPTIONS_TEMPLATE) opt_t.update(which="time", **time_opts) if period_opts is not None: - # Replace 'True' with an options dict for 'period' options + # Convert a non-null kwarg into an options dict for 'period' options opt_p = Opts(**_COORD_OPTIONS_TEMPLATE) opt_p.update(which="period", **period_opts) diff --git a/lib/iris/tests/unit/fileformats/netcdf/__init__.py b/lib/iris/tests/unit/fileformats/netcdf/__init__.py index b8d0502075..732094f67a 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/__init__.py +++ b/lib/iris/tests/unit/fileformats/netcdf/__init__.py @@ -3,4 +3,4 @@ # This file is part of Iris and is released under the LGPL license. # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. 
-"""Unit tests for the :mod:`iris.fileformats.netcdf._nc_load_rules` module.""" +"""Unit tests for the :mod:`iris.fileformats.netcdf` module.""" From 2f9d462c11c754f26cba5282ce845a548d61925f Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 23 Jun 2021 18:20:53 +0100 Subject: [PATCH 46/53] Updated all 'rules triggered' comments. --- .../actions/test__grid_mappings.py | 216 ++++++++++-------- .../actions/test__hybrid_formulae.py | 29 ++- .../actions/test__time_coords.py | 37 ++- 3 files changed, 152 insertions(+), 130 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py index ebea5acd45..a4cb774249 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py @@ -343,11 +343,11 @@ def test_basic_latlon(self): # A basic reference example with a lat-long grid. # Rules Triggered: # 001 : fc_default - # 002 : fc_provides_grid_mapping_latitude_longitude - # 003 : fc_provides_coordinate_latitude - # 004 : fc_provides_coordinate_longitude - # 005 : fc_build_coordinate_latitude - # 006 : fc_build_coordinate_longitude + # 002 : fc_provides_grid_mapping_(latitude_longitude) + # 003 : fc_provides_coordinate_(latitude) + # 004 : fc_provides_coordinate_(longitude) + # 005 : fc_build_coordinate_(latitude) + # 006 : fc_build_coordinate_(longitude) # Notes: # grid-mapping: regular latlon # dim-coords: lat+lon @@ -367,10 +367,11 @@ def test_bad_gridmapping_nameproperty(self): # Fix the 'grid' var so it does not register as a grid-mapping. # Rules Triggered: # 001 : fc_default - # 002 : fc_provides_coordinate_latitude - # 003 : fc_provides_coordinate_longitude - # 004 : fc_build_coordinate_latitude_nocs - # 005 : fc_build_coordinate_longitude_nocs + # 002 : fc_provides_grid_mapping --FAILED(no grid-mapping attr) + # 003 : fc_provides_coordinate_(latitude) + # 004 : fc_provides_coordinate_(longitude) + # 005 : fc_build_coordinate_(latitude)(no-cs) + # 006 : fc_build_coordinate_(longitude)(no-cs) # Notes: # grid-mapping: NONE # dim-coords: lat+lon @@ -383,10 +384,10 @@ def test_latlon_bad_gridmapping_varname(self): # (I.E. the var named in "data-variable:grid_mapping" does not exist). 
# Rules Triggered: # 001 : fc_default - # 002 : fc_provides_coordinate_latitude - # 003 : fc_provides_coordinate_longitude - # 004 : fc_build_coordinate_latitude_nocs - # 005 : fc_build_coordinate_longitude_nocs + # 002 : fc_provides_coordinate_(latitude) + # 003 : fc_provides_coordinate_(longitude) + # 004 : fc_build_coordinate_(latitude)(no-cs) + # 005 : fc_build_coordinate_(longitude)(no-cs) # Notes: # no coord-system # all the same as test_bad_gridmapping_nameproperty @@ -399,10 +400,11 @@ def test_latlon_bad_latlon_unit(self): # # Rules Triggered: # 001 : fc_default - # 002 : fc_provides_grid_mapping_latitude_longitude - # 003 : fc_provides_coordinate_longitude - # 004 : fc_build_coordinate_longitude - # 005 : fc_default_coordinate + # 002 : fc_provides_grid_mapping_(latitude_longitude) + # 003 : fc_default_coordinate_(provide-phase) + # 004 : fc_provides_coordinate_(longitude) + # 005 : fc_build_coordinate_(miscellaneous) + # 006 : fc_build_coordinate_(longitude) # Notes: # grid-mapping: regular latlon # dim-coords: @@ -424,11 +426,11 @@ def test_mapping_rotated(self): # # Rules Triggered: # 001 : fc_default - # 002 : fc_provides_grid_mapping_rotated_latitude_longitude - # 003 : fc_provides_coordinate_latitude - # 004 : fc_provides_coordinate_longitude - # 005 : fc_build_coordinate_latitude_rotated - # 006 : fc_build_coordinate_longitude_rotated + # 002 : fc_provides_grid_mapping_(rotated_latitude_longitude) + # 003 : fc_provides_coordinate_(rotated_latitude) + # 004 : fc_provides_coordinate_(rotated_longitude) + # 005 : fc_build_coordinate_(rotated_latitude)(rotated) + # 006 : fc_build_coordinate_(rotated_longitude)(rotated) # Notes: # grid-mapping: rotated lat-lon # dim-coords: lat+lon @@ -450,11 +452,11 @@ def test_mapping_rotated(self): # # Rules Triggered: # 001 : fc_default - # 002 : fc_provides_grid_mapping_ - # 003 : fc_provides_projection_x_coordinate - # 004 : fc_provides_projection_y_coordinate - # 005 : fc_build_coordinate_projection_x_ - # 006 : fc_build_coordinate_projection_y_ + # 002 : fc_provides_grid_mapping_() + # 003 : fc_provides_coordinate_(projection_y) + # 004 : fc_provides_coordinate_(projection_x) + # 005 : fc_build_coordinate_(projection_y) + # 006 : fc_build_coordinate_(projection_x) # Notes: # grid-mapping: # dim-coords: proj-x and -y @@ -491,8 +493,11 @@ def test_mapping_mercator(self): def test_mapping_mercator__fail_unsupported(self): # Rules Triggered: # 001 : fc_default - # 002 : fc_provides_projection_x_coordinate - # 003 : fc_provides_projection_y_coordinate + # 002 : fc_provides_grid_mapping_(mercator) --(FAILED check has_supported_mercator_parameters) + # 003 : fc_provides_coordinate_(projection_y) + # 004 : fc_provides_coordinate_(projection_x) + # 005 : fc_build_coordinate_(projection_y)(FAILED projected coord with non-projected cs) + # 006 : fc_build_coordinate_(projection_x)(FAILED projected coord with non-projected cs) # Notes: # grid-mapping: NONE # dim-coords: proj-x and -y @@ -515,8 +520,11 @@ def test_mapping_stereographic(self): def test_mapping_stereographic__fail_unsupported(self): # Rules Triggered: # 001 : fc_default - # 002 : fc_provides_projection_x_coordinate - # 003 : fc_provides_projection_y_coordinate + # 002 : fc_provides_grid_mapping_(stereographic) --(FAILED check has_supported_stereographic_parameters) + # 003 : fc_provides_coordinate_(projection_y) + # 004 : fc_provides_coordinate_(projection_x) + # 005 : fc_build_coordinate_(projection_y)(FAILED projected coord with non-projected cs) + # 006 : 
fc_build_coordinate_(projection_x)(FAILED projected coord with non-projected cs) # Notes: # as for 'mercator__fail_unsupported', above # = NO dim-coords built (cube has no coords) @@ -547,8 +555,11 @@ def test_mapping_unsupported(self): # # Rules Triggered: # 001 : fc_default - # 002 : fc_provides_projection_x_coordinate - # 003 : fc_provides_projection_y_coordinate + # 002 : fc_provides_grid_mapping --FAILED(unhandled type azimuthal_equidistant) + # 003 : fc_provides_coordinate_(projection_y) + # 004 : fc_provides_coordinate_(projection_x) + # 005 : fc_build_coordinate_(projection_y)(FAILED projected coord with non-projected cs) + # 006 : fc_build_coordinate_(projection_x)(FAILED projected coord with non-projected cs) # NOTES: # - there is no warning for this. # TODO: perhaps there should be ? @@ -562,8 +573,11 @@ def test_mapping_undefined(self): # # Rules Triggered: # 001 : fc_default - # 002 : fc_provides_projection_x_coordinate - # 003 : fc_provides_projection_y_coordinate + # 002 : fc_provides_grid_mapping --FAILED(unhandled type unknown) + # 003 : fc_provides_coordinate_(projection_y) + # 004 : fc_provides_coordinate_(projection_x) + # 005 : fc_build_coordinate_(projection_y)(FAILED projected coord with non-projected cs) + # 006 : fc_build_coordinate_(projection_x)(FAILED projected coord with non-projected cs) # NOTES: # - there is no warning for this. # TODO: perhaps there should be ? @@ -580,9 +594,11 @@ def test_mapping_undefined(self): def test_mapping__mismatch__latlon_coords_rotated_system(self): # Rules Triggered: # 001 : fc_default - # 002 : fc_provides_grid_mapping_rotated_latitude_longitude - # 003 : fc_provides_coordinate_latitude - # 004 : fc_provides_coordinate_longitude + # 002 : fc_provides_grid_mapping_(rotated_latitude_longitude) + # 003 : fc_provides_coordinate_(latitude) + # 004 : fc_provides_coordinate_(longitude) + # 005 : fc_build_coordinate_(latitude)(FAILED : latlon coord with rotated cs) + # 006 : fc_build_coordinate_(longitude)(FAILED : latlon coord with rotated cs) # NOTES: # no build_coord triggers, as it requires the correct mapping type # so no dim-coords at all in this case @@ -598,11 +614,11 @@ def test_mapping__mismatch__latlon_coords_rotated_system(self): def test_mapping__mismatch__latlon_coords_nonll_system(self): # Rules Triggered: # 001 : fc_default - # 002 : fc_provides_grid_mapping_albers_equal_area - # 003 : fc_provides_coordinate_latitude - # 004 : fc_provides_coordinate_longitude - # 005 : fc_build_coordinate_latitude_nocs - # 006 : fc_build_coordinate_longitude_nocs + # 002 : fc_provides_grid_mapping_(albers_conical_equal_area) + # 003 : fc_provides_coordinate_(latitude) + # 004 : fc_provides_coordinate_(longitude) + # 005 : fc_build_coordinate_(latitude)(no-cs : discarded projected cs) + # 006 : fc_build_coordinate_(longitude)(no-cs : discarded projected cs) # NOTES: # build_coord_XXX_cs triggers, requires NO latlon/rotated mapping # - but a non-ll mapping is 'ok'. 
@@ -619,10 +635,10 @@ def test_mapping__mismatch__latlon_coords_nonll_system(self): def test_mapping__mismatch__latlon_coords_missing_system(self): # Rules Triggered: # 001 : fc_default - # 002 : fc_provides_coordinate_latitude - # 003 : fc_provides_coordinate_longitude - # 004 : fc_build_coordinate_latitude_nocs - # 005 : fc_build_coordinate_longitude_nocs + # 002 : fc_provides_coordinate_(latitude) + # 003 : fc_provides_coordinate_(longitude) + # 004 : fc_build_coordinate_(latitude)(no-cs) + # 005 : fc_build_coordinate_(longitude)(no-cs) # NOTES: # same as nonll, except *NO* grid-mapping is detected, # - which makes no practical difference @@ -640,9 +656,11 @@ def test_mapping__mismatch__latlon_coords_missing_system(self): def test_mapping__mismatch__rotated_coords_latlon_system(self): # Rules Triggered: # 001 : fc_default - # 002 : fc_provides_grid_mapping_latitude_longitude - # 003 : fc_provides_coordinate_latitude - # 004 : fc_provides_coordinate_longitude + # 002 : fc_provides_grid_mapping_(latitude_longitude) + # 003 : fc_provides_coordinate_(rotated_latitude) + # 004 : fc_provides_coordinate_(rotated_longitude) + # 005 : fc_build_coordinate_(rotated_latitude)(FAILED rotated coord with latlon cs) + # 006 : fc_build_coordinate_(rotated_longitude)(FAILED rotated coord with latlon cs) # NOTES: # no build_coord triggers : requires NO latlon/rotated mapping # hence no coords at all @@ -657,11 +675,11 @@ def test_mapping__mismatch__rotated_coords_latlon_system(self): def test_mapping__mismatch__rotated_coords_nonll_system(self): # Rules Triggered: # 001 : fc_default - # 002 : fc_provides_grid_mapping_albers_equal_area - # 003 : fc_provides_coordinate_latitude - # 004 : fc_provides_coordinate_longitude - # 005 : fc_build_coordinate_latitude_nocs - # 006 : fc_build_coordinate_longitude_nocs + # 002 : fc_provides_grid_mapping_(albers_conical_equal_area) + # 003 : fc_provides_coordinate_(rotated_latitude) + # 004 : fc_provides_coordinate_(rotated_longitude) + # 005 : fc_build_coordinate_(rotated_latitude)(rotated no-cs : discarded projected cs) + # 006 : fc_build_coordinate_(rotated_longitude)(rotated no-cs : discarded projected cs) # NOTES: # this is different from the previous # build_coord.._nocs triggers : requires NO latlon/rotated mapping @@ -679,10 +697,10 @@ def test_mapping__mismatch__rotated_coords_nonll_system(self): def test_mapping__mismatch__rotated_coords_missing_system(self): # Rules Triggered: # 001 : fc_default - # 002 : fc_provides_coordinate_latitude - # 003 : fc_provides_coordinate_longitude - # 004 : fc_build_coordinate_latitude_nocs - # 005 : fc_build_coordinate_longitude_nocs + # 002 : fc_provides_coordinate_(rotated_latitude) + # 003 : fc_provides_coordinate_(rotated_longitude) + # 004 : fc_build_coordinate_(rotated_latitude)(rotated no-cs) + # 005 : fc_build_coordinate_(rotated_longitude)(rotated no-cs) # NOTES: # as previous, but no grid-mapping (which makes no difference) warning = "Missing.*grid mapping variable 'grid'" @@ -699,9 +717,11 @@ def test_mapping__mismatch__rotated_coords_missing_system(self): def test_mapping__mismatch__nonll_coords_latlon_system(self): # Rules Triggered: # 001 : fc_default - # 002 : fc_provides_grid_mapping_latitude_longitude - # 003 : fc_default_coordinate - # 004 : fc_default_coordinate + # 002 : fc_provides_grid_mapping_(latitude_longitude) + # 003 : fc_default_coordinate_(provide-phase) + # 004 : fc_default_coordinate_(provide-phase) + # 005 : fc_build_coordinate_(miscellaneous) + # 006 : fc_build_coordinate_(miscellaneous) # 
NOTES: # dim-coords built as "defaults" : dim-coords, but NO standard name result = self.run_testcase( @@ -717,9 +737,11 @@ def test_mapping__mismatch__nonll_coords_latlon_system(self): def test_mapping__mismatch__nonll_coords_rotated_system(self): # Rules Triggered: # 001 : fc_default - # 002 : fc_provides_grid_mapping_rotated_latitude_longitude - # 003 : fc_default_coordinate - # 004 : fc_default_coordinate + # 002 : fc_provides_grid_mapping_(rotated_latitude_longitude) + # 003 : fc_default_coordinate_(provide-phase) + # 004 : fc_default_coordinate_(provide-phase) + # 005 : fc_build_coordinate_(miscellaneous) + # 006 : fc_build_coordinate_(miscellaneous) # NOTES: # same as previous __mismatch__nonll_ result = self.run_testcase( @@ -736,8 +758,10 @@ def test_mapping__mismatch__nonll_coords_rotated_system(self): def test_mapping__mismatch__nonll_coords_missing_system(self): # Rules Triggered: # 001 : fc_default - # 002 : fc_default_coordinate - # 003 : fc_default_coordinate + # 002 : fc_default_coordinate_(provide-phase) + # 003 : fc_default_coordinate_(provide-phase) + # 004 : fc_build_coordinate_(miscellaneous) + # 005 : fc_build_coordinate_(miscellaneous) # NOTES: # effectively, just like previous 2 __mismatch__nonll_ warning = "Missing.*grid mapping variable 'grid'" @@ -768,21 +792,21 @@ def test_aux_lon(self): # Change the name of xdim, and put xco on the coords list. # # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_grid_mapping_latitude_longitude - # 003 : fc_provides_coordinate_latitude - # 004 : fc_build_auxiliary_coordinate_longitude - # 005 : fc_build_coordinate_latitude + # 001 : fc_default + # 002 : fc_provides_grid_mapping_(latitude_longitude) + # 003 : fc_provides_coordinate_(latitude) + # 004 : fc_build_coordinate_(latitude) + # 005 : fc_build_auxiliary_coordinate_longitude result = self.run_testcase(xco_is_dim=False) self.check_result(result, xco_is_aux=True, xco_no_cs=True) def test_aux_lat(self): # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_grid_mapping_latitude_longitude - # 003 : fc_provides_coordinate_longitude - # 004 : fc_build_auxiliary_coordinate_latitude - # 005 : fc_build_coordinate_longitude + # 001 : fc_default + # 002 : fc_provides_grid_mapping_(latitude_longitude) + # 003 : fc_provides_coordinate_(longitude) + # 004 : fc_build_coordinate_(longitude) + # 005 : fc_build_auxiliary_coordinate_latitude result = self.run_testcase(yco_is_dim=False) self.check_result(result, yco_is_aux=True, yco_no_cs=True) @@ -791,10 +815,10 @@ def test_aux_lat_and_lon(self): # - as in this case there are then no dim-coords to reference it. # # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_grid_mapping_latitude_longitude - # 003 : fc_build_auxiliary_coordinate_latitude - # 004 : fc_build_auxiliary_coordinate_longitude + # 001 : fc_default + # 002 : fc_provides_grid_mapping_(latitude_longitude) + # 003 : fc_build_auxiliary_coordinate_longitude + # 004 : fc_build_auxiliary_coordinate_latitude result = self.run_testcase(xco_is_dim=False, yco_is_dim=False) self.check_result( result, xco_is_aux=True, yco_is_aux=True, cube_no_cs=True @@ -804,11 +828,11 @@ def test_aux_lon_rotated(self): # Same but with rotated-style lat + lon coords. 
# # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_grid_mapping_rotated_latitude_longitude - # 003 : fc_provides_coordinate_latitude - # 004 : fc_build_auxiliary_coordinate_longitude_rotated - # 005 : fc_build_coordinate_latitude_rotated + # 001 : fc_default + # 002 : fc_provides_grid_mapping_(rotated_latitude_longitude) + # 003 : fc_provides_coordinate_(rotated_latitude) + # 004 : fc_build_coordinate_(rotated_latitude)(rotated) + # 005 : fc_build_auxiliary_coordinate_longitude_rotated result = self.run_testcase( mapping_type_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON, xco_is_dim=False, @@ -817,11 +841,11 @@ def test_aux_lon_rotated(self): def test_aux_lat_rotated(self): # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_grid_mapping_rotated_latitude_longitude - # 003 : fc_provides_coordinate_latitude - # 004 : fc_build_auxiliary_coordinate_longitude_rotated - # 005 : fc_build_coordinate_latitude_rotated + # 001 : fc_default + # 002 : fc_provides_grid_mapping_(rotated_latitude_longitude) + # 003 : fc_provides_coordinate_(rotated_longitude) + # 004 : fc_build_coordinate_(rotated_longitude)(rotated) + # 005 : fc_build_auxiliary_coordinate_latitude_rotated result = self.run_testcase( mapping_type_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON, yco_is_dim=False, @@ -843,11 +867,11 @@ def test_nondim_lats(self): # # Rules Triggered: # 001 : fc_default - # 002 : fc_provides_grid_mapping_latitude_longitude - # 003 : fc_provides_coordinate_latitude - # 004 : fc_provides_coordinate_longitude - # 005 : fc_build_coordinate_latitude - # 006 : fc_build_coordinate_longitude + # 002 : fc_provides_grid_mapping_(latitude_longitude) + # 003 : fc_provides_coordinate_(latitude) + # 004 : fc_provides_coordinate_(longitude) + # 005 : fc_build_coordinate_(latitude) + # 006 : fc_build_coordinate_(longitude) # NOTES: # in terms of rule triggers, this is not distinct from a normal case # - but the latitude is now an aux-coord. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py index c4a3b37c01..f9a11ba403 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__hybrid_formulae.py @@ -151,12 +151,10 @@ def test_basic_hybridheight(self): # 003 : fc_build_auxiliary_coordinate # 004 : fc_build_auxiliary_coordinate # 005 : fc_build_auxiliary_coordinate - # 006 : fc_build_auxiliary_coordinate - # 007 : fc_formula_type_atmosphere_hybrid_sigma_pressure_coordinate - # 008 : fc_formula_terms - # 009 : fc_formula_terms - # 010 : fc_formula_terms - # 011 : fc_formula_terms + # 008 : fc_formula_type_atmosphere_hybrid_height_coordinate + # 009 : fc_formula_term(a) + # 010 : fc_formula_term(b) + # 011 : fc_formula_term(orog) result = self.run_testcase() self.check_result(result) @@ -169,9 +167,9 @@ def test_missing_term(self): # 002 : fc_build_auxiliary_coordinate # 003 : fc_build_auxiliary_coordinate # 004 : fc_build_auxiliary_coordinate - # 005 : fc_formula_type_atmosphere_hybrid_height_coordinate - # 006 : fc_formula_terms - # 007 : fc_formula_terms + # 007 : fc_formula_type_atmosphere_hybrid_height_coordinate + # 008 : fc_formula_term(a) + # 009 : fc_formula_term(b) result = self.run_testcase( term_names=["a", "b"] # missing the 'orog' term ) @@ -199,12 +197,13 @@ def test_unrecognised_verticaltype(self): # Set the root variable name to something NOT a recognised hybrid type. 
# # Rules Triggered: - # 001 : fc_default - # 002 : fc_build_auxiliary_coordinate - # 003 : fc_build_auxiliary_coordinate - # 004 : fc_build_auxiliary_coordinate - # 005 : fc_formula_terms - # 006 : fc_formula_terms + # 001 : fc_default + # 002 : fc_build_auxiliary_coordinate + # 003 : fc_build_auxiliary_coordinate + # 004 : fc_build_auxiliary_coordinate + # 007 : fc_formula_type(FAILED - unrecognised formula type = 'unknown') + # 008 : fc_formula_term(a) + # 009 : fc_formula_term(b) result = self.run_testcase( formula_root_name="unknown", term_names=["a", "b"], diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py index f0be79d18b..9a2e916380 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py @@ -286,8 +286,8 @@ def test_dimension(self): # # Rules Triggered: # 001 : fc_default - # 002 : fc_provides_coordinate_time - # 003 : fc_build_coordinate_time + # 002 : fc_provides_coordinate_(time[[_period]]) + # 003 : fc_build_coordinate_(time[[_period]]) result = self.run_testcase() self.check_result(result, "dim") @@ -296,8 +296,8 @@ def test_dimension_in_phenom_coords(self): # Strictly wrong but a common error in datafiles : must tolerate. # Rules Triggered: # 001 : fc_default - # 002 : fc_provides_coordinate_time - # 003 : fc_build_coordinate_time + # 002 : fc_provides_coordinate_(time[[_period]]) + # 003 : fc_build_coordinate_(time[[_period]]) result = self.run_testcase(in_phenomvar_coords=True) self.check_result(result, "dim") @@ -308,8 +308,8 @@ def test_dim_nonmonotonic(self): # ( Done by the build_coord routine, so not really a rules issue). # Rules Triggered: # 001 : fc_default - # 002 : fc_provides_coordinate_time - # 003 : fc_build_coordinate_time + # 002 : fc_provides_coordinate_(time[[_period]]) + # 003 : fc_build_coordinate_(time[[_period]]) msg = "Failed to create.* dimension coordinate" result = self.run_testcase(values_all_zero=True, warning=msg) self.check_result(result, "aux") @@ -324,8 +324,9 @@ def test_dim_fails_typeident(self): # ( N.B.#2 Not quite the same for lat/lon coords, where coord-specific # 'build' rules always use a fixed standard-name ). # Rules Triggered: - # #001 : fc_default - # #002 : fc_default_coordinate + # 001 : fc_default + # 002 : fc_default_coordinate_(provide-phase) + # 003 : fc_build_coordinate_(miscellaneous) result = self.run_testcase(units="1") self.check_result(result, "dim") @@ -336,7 +337,7 @@ def test_aux(self): # For a valid case, we must *also* have a ref in phenom:coordinates # Rules Triggered: # 001 : fc_default - # 002 : fc_build_auxiliary_coordinate_time + # 002 : fc_build_auxiliary_coordinate_time[[_period]] result = self.run_testcase( coord_dim_name="dim_renamed", dimname="dim_renamed", @@ -434,22 +435,20 @@ def test_time_and_period(self): # Test case with both 'time' and 'period', with separate dims. 
# Rules Triggered: # 001 : fc_default - # 002 : fc_provides_coordinate_time - # 003 : fc_provides_coordinate_time_period - # 004 : fc_build_coordinate_time - # 005 : fc_build_coordinate_time_period + # 002 : fc_provides_coordinate_(time) + # 003 : fc_provides_coordinate_(time_period) + # 004 : fc_build_coordinate_(time) + # 005 : fc_build_coordinate_(time_period) result = self.run_testcase(time_opts={}, period_opts={}) self.check_result(result, time_is="dim", period_is="dim") def test_time_dim_period_aux(self): # Test case with both 'time' and 'period' sharing a dim. - # Rules Triggered: - # 001 : fc_default - # Rules Triggered: + # Rules Triggered: # 001 : fc_default - # 002 : fc_provides_coordinate_time - # 003 : fc_build_auxiliary_coordinate_time_period - # 004 : fc_build_coordinate_time + # 002 : fc_provides_coordinate_(time) + # 003 : fc_build_coordinate_(time) + # 004 : fc_build_auxiliary_coordinate_time_period result = self.run_testcase( time_opts={}, period_opts=dict( From c87fca8615b7d1809c3322f4e3f7802e27735fcc Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 23 Jun 2021 18:43:06 +0100 Subject: [PATCH 47/53] Fix scope-bleed from actions test debugging, and turn it off. --- .../tests/unit/fileformats/nc_load_rules/actions/__init__.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py index 80b3a0dbef..26f3e7b4bb 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py @@ -60,7 +60,7 @@ class Mixin__nc_load_actions: """ # "global" test setting : whether to output various debug info - debug = True + debug = False # whether to perform action in both ways and compare results. @classmethod @@ -97,7 +97,8 @@ def load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): engine = iris.fileformats.netcdf._actions_engine() # If debug enabled, switch on the activation summary debug output. - iris.fileformats.netcdf.DEBUG = self.debug + # Use 'patch' so it is restored after the test. + self.patch("iris.fileformats.netcdf.DEBUG", self.debug) # Call the main translation function to load a single cube. # _load_cube establishes per-cube facts, activates rules and From b40a446614d469c432aa840303bc51234edf73f4 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 29 Jun 2021 11:02:38 +0100 Subject: [PATCH 48/53] Updated lockfiles. 
--- requirements/ci/nox.lock/py37-linux-64.lock | 231 ++++++++++++++++++++ requirements/ci/nox.lock/py38-linux-64.lock | 225 +++++++++++++++++++ 2 files changed, 456 insertions(+) create mode 100644 requirements/ci/nox.lock/py37-linux-64.lock create mode 100644 requirements/ci/nox.lock/py38-linux-64.lock diff --git a/requirements/ci/nox.lock/py37-linux-64.lock b/requirements/ci/nox.lock/py37-linux-64.lock new file mode 100644 index 0000000000..b1cc74adba --- /dev/null +++ b/requirements/ci/nox.lock/py37-linux-64.lock @@ -0,0 +1,231 @@ +# platform: linux-64 +# env_hash: 6c711a9771dd36da4e85fae63fd584e6a1bba39d304d13e747db86a4f861804e +@EXPLICIT +https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 +https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2021.5.30-ha878542_0.tar.bz2#6a777890e94194dc94a29a76d2a7e721 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb +https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 +https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.35.1-hea4e1c9_2.tar.bz2#83610dba766a186bdc7a116053b782a4 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-9.3.0-hff62375_19.tar.bz2#c2d8da3cb171e4aa642d20c6e4e42a04 +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-9.3.0-h6de172a_19.tar.bz2#cd9a24a8dde03ec0cf0e603b0bea85a1 +https://conda.anaconda.org/conda-forge/linux-64/mpi-1.0-mpich.tar.bz2#c1fcff3417b5a22bbc4cf6e8c23648cf +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.25-ha770c72_2.tar.bz2#b1ba065c6d2b9468035472a9d63e5b08 +https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-9.3.0-hff62375_19.tar.bz2#aea379bd68fdcdf9499fa1453f852ac1 +https://conda.anaconda.org/conda-forge/linux-64/libgomp-9.3.0-h2828fa1_19.tar.bz2#ab0a307912033126da02507b59e79ec9 +https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-1_gnu.tar.bz2#561e277319a41d4f24f5c05a9ef63c04 +https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-9.3.0-h2828fa1_19.tar.bz2#9d5cdfc51476ee4dcdd96ed2dca3f943 +https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.3-h516909a_0.tar.bz2#1378b88874f42ac31b2f8e4f6975cb7b +https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.17.1-h7f98852_1.tar.bz2#ed1dc233ed5e3eaa9bfbaac64d130c5e +https://conda.anaconda.org/conda-forge/linux-64/expat-2.4.1-h9c3ff4c_0.tar.bz2#16054ef3cb3ec5d8d29d08772662f65d +https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 +https://conda.anaconda.org/conda-forge/linux-64/geos-3.9.1-h9c3ff4c_2.tar.bz2#b9a6d9422aed3ad84ec6ccee9bfcaa0f +https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h36c2ea0_2.tar.bz2#626e68ae9cc5912d6adb79d318cf962d 
+https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 +https://conda.anaconda.org/conda-forge/linux-64/icu-68.1-h58526e2_0.tar.bz2#fc7a4271dc2a7f4fd78cd63695baf7c3 +https://conda.anaconda.org/conda-forge/linux-64/jbig-2.1-h7f98852_2003.tar.bz2#1aa0cee79792fa97b7ff4545110b60bf +https://conda.anaconda.org/conda-forge/linux-64/jpeg-9d-h36c2ea0_0.tar.bz2#ea02ce6037dbe81803ae6123e5ba1568 +https://conda.anaconda.org/conda-forge/linux-64/lerc-2.2.1-h9c3ff4c_0.tar.bz2#ea833dcaeb9e7ac4fac521f1a7abec82 +https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.7-h7f98852_5.tar.bz2#10e242842cd30c59c12d79371dc0f583 +https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 +https://conda.anaconda.org/conda-forge/linux-64/libffi-3.3-h58526e2_2.tar.bz2#665369991d8dd290ac5ee92fce3e6bf5 +https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.16-h516909a_0.tar.bz2#5c0f338a513a2943c659ae619fca9211 +https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d +https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.15-pthreads_h8fe5266_1.tar.bz2#bb5527a16584426a897f22643d9a36a6 +https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f +https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.6-h58526e2_1007.tar.bz2#7f6569a0c2f27acb8fc90600b382e544 +https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.32.1-h7f98852_1000.tar.bz2#772d69f030955d9646d3d0eaf21d859d +https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.2.0-h7f98852_2.tar.bz2#fb63a035a3b552c88a30d84b89ebf4c4 +https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.3-h9c3ff4c_0.tar.bz2#4eb64ee0d5cd43096ffcf843c76b05d4 +https://conda.anaconda.org/conda-forge/linux-64/mpich-3.4.2-h846660c_100.tar.bz2#0868d02349fc7e128d4bdc515b58dd7e +https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.2-h58526e2_4.tar.bz2#509f2a21c4a09214cd737a480dfd80c9 +https://conda.anaconda.org/conda-forge/linux-64/nspr-4.30-h9c3ff4c_0.tar.bz2#e6dc1f8f6e0bcebe8e3d8a5bca258dbe +https://conda.anaconda.org/conda-forge/linux-64/openssl-1.1.1k-h7f98852_0.tar.bz2#07fae2cb088379c8441e0f3ffa1f4025 +https://conda.anaconda.org/conda-forge/linux-64/pcre-8.45-h9c3ff4c_0.tar.bz2#c05d1820a6d34ff07aaaab7a9b7eddaa +https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 +https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 +https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a +https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.0.10-h7f98852_0.tar.bz2#d6b0b50b49eccfe0be0373be628be0f3 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.9-h7f98852_0.tar.bz2#bf6f803a544f26ebbdc3bfff272eb179 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2#be93aabceefa2fac576e971aef407908 +https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2#06feff3d2634e3097ce2fe681474b534 +https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h7f98852_1002.tar.bz2#1e15f6ad85a7d743a2ac68dae6c82b98 
+https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2#b4a4381d54784606820704f7b5f05a15 +https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.0-h7f98852_3.tar.bz2#52402c791f35e414e704b7a113f99605 +https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.5-h516909a_1.tar.bz2#33f601066901f3e1a85af3522a8113f9 +https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h516909a_0.tar.bz2#03a530e925414902547cf48da7756db8 +https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.11-h516909a_1010.tar.bz2#339cc5584e6d26bc73a875ba900028c3 +https://conda.anaconda.org/conda-forge/linux-64/gettext-0.19.8.1-h0b5b191_1005.tar.bz2#ff6f69b593a9e74c0e6b61908ac513fa +https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h10796ff_3.tar.bz2#21a8d66dc17f065023b33145c42652fe +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-9_openblas.tar.bz2#5f08755e98b2a43ca68124e629a5a0cb +https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 +https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.10-hcdb4288_3.tar.bz2#d8f51405997093ff1799ded7650439c4 +https://conda.anaconda.org/conda-forge/linux-64/libllvm11-11.1.0-hf817b99_2.tar.bz2#646fa2f7c60b69ee8f918668e9c2fd31 +https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.43.0-h812cca2_0.tar.bz2#1867d1e9658596b3fac8847a7702eef4 +https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.37-h21135ba_2.tar.bz2#b6acf807307d033d4b7e758b4f44b036 +https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.9.0-ha56f1ee_6.tar.bz2#f0dfb86444df325e599dbc3f4c0a3f5b +https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 +https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.13-h7f98852_1003.tar.bz2#a9371e9e40aded194dcba1447606c9a1 +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.9.12-h72842e0_0.tar.bz2#bd14fdf5b9ee5568056a40a6a2f41866 +https://conda.anaconda.org/conda-forge/linux-64/libzip-1.8.0-h4de3113_0.tar.bz2#2d1b63c574f3e11157a07313e58ba7af +https://conda.anaconda.org/conda-forge/linux-64/readline-8.1-h46c0cb4_0.tar.bz2#5788de3c8d7a7d64ac56c784c4ef48e6 +https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.10-h21135ba_1.tar.bz2#c647f70aa7e3d4cc4e029cc1c9a99953 +https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.27.27-h975c496_1.tar.bz2#e663bd5dbc8cc4c1647d9f51cf25872c +https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.3-hd9c2040_1000.tar.bz2#9e856f78d5c80d5a78f61e72d1d473a3 +https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.0-ha95c52a_0.tar.bz2#b56f94865e2de36abf054e7bfa499034 +https://conda.anaconda.org/conda-forge/linux-64/freetype-2.10.4-h0708190_1.tar.bz2#4a06f2ac2e5bfae7b6b245171c3f07aa +https://conda.anaconda.org/conda-forge/linux-64/krb5-1.19.1-hcc1bbae_0.tar.bz2#59b0695a515a6c54d45463dbf208ae38 +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-9_openblas.tar.bz2#edee85b4f83376ceae81e0975b8bffa2 +https://conda.anaconda.org/conda-forge/linux-64/libclang-11.1.0-default_ha53f305_1.tar.bz2#b9b71585ca4fcb5d442c5a9df5dd7e98 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.68.3-h3e27bee_0.tar.bz2#99416a3287216de097d503b827ad0bde +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-9_openblas.tar.bz2#572d84ab07962986f6dd8e4637a475ca +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.3.0-hf544144_1.tar.bz2#a65a4158716bd7d95bfa69bcfd83081c 
+https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.0.3-he3ba5ed_0.tar.bz2#f9dbabc7e01c459ed7a1d1d64b206e9b +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.25-hfa10184_2.tar.bz2#5a35fdd2da4c2d5fdf20575d39c232e5 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.36.0-h9cd32fc_0.tar.bz2#d5bbac924cbda57469f43448d5236a50 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.7.2-h7f98852_0.tar.bz2#12a61e640b8894504326aadafccbb790 +https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.36.0-h3371d22_4.tar.bz2#661e1ed5d92552785d9f8c781ce68685 +https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.13.1-hba837de_1005.tar.bz2#fd3611672eb91bc9d24fd6fb970037eb +https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.6-h04a7f16_0.tar.bz2#b24a1e18325a6e8f8b6b4a2ec5860ce2 +https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.68.3-h9c3ff4c_0.tar.bz2#2e9275303dd09a2e245faf31770a1416 +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.18.4-h76c114f_2.tar.bz2#5db765d4974fa89f64c1544eb2a552cb +https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h64030ff_2.tar.bz2#112eb9b5b93f0c02e59aea4fd1967363 +https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.12-hddcbb42_0.tar.bz2#797117394a4aa588de6d741b06fad80f +https://conda.anaconda.org/conda-forge/linux-64/libcurl-7.77.0-h2574ce0_0.tar.bz2#05cf8dca8408b5f1ffcc5e2d5a7c5da2 +https://conda.anaconda.org/conda-forge/linux-64/libpq-13.3-hd57d9b9_0.tar.bz2#66ef2cacc483205b7d303f7b02601c3b +https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.0-h3452ae3_0.tar.bz2#8f4e19a8988c38feec7db41bcd0bf0d0 +https://conda.anaconda.org/conda-forge/linux-64/nss-3.67-hb5efdd6_0.tar.bz2#3f2a4bc7d5fded1327ff1b8c61faae53 +https://conda.anaconda.org/conda-forge/linux-64/python-3.7.10-hffdb5ce_100_cpython.tar.bz2#7425fffa658971915f595e9110163c3c +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h7f98852_1.tar.bz2#536cc5db4d0a3ba0630541aec064b5e4 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb +https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.12-py_0.tar.bz2#2489a97287f90176ecdc3ca982b4b0a0 +https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2#5f095bc6454094e96f146491fd03633b +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-h6cf1ce9_1008.tar.bz2#a43fb47d15e116f8be4be7e6b17ab59f +https://conda.anaconda.org/conda-forge/noarch/cloudpickle-1.6.0-py_0.tar.bz2#76d764d8881719e305f6fa368dc2b65e +https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.4-pyh9f0ad1d_0.tar.bz2#c08b4c1326b880ed44f3ffb04803332f +https://conda.anaconda.org/conda-forge/linux-64/curl-7.77.0-hea6ffbf_0.tar.bz2#7d1168349d6fba67ae1fdf61970b83e1 +https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.2-pyhd8ed1ab_0.tar.bz2#ae8b866c376568b0342ae2c9b68f1e65 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.0.12-pyh9f0ad1d_0.tar.bz2#7544ed05bbbe9bb687bc9bcbe4d6cb46 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2021.6.1-pyhd8ed1ab_0.tar.bz2#b8dca3cd859c8a849042af6db1cbedca +https://conda.anaconda.org/conda-forge/linux-64/glib-2.68.3-h9c3ff4c_0.tar.bz2#90e989058c8b42e3ddee1560c534313b +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.18.4-hf529b03_2.tar.bz2#526fadaa13ec264cb919436953bc2766 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.10.6-mpi_mpich_h996c276_1014.tar.bz2#6af2e2e4dfb0ef36c35042cd69a1599d 
+https://conda.anaconda.org/conda-forge/noarch/heapdict-1.0.1-py_0.tar.bz2#77242bfb1e74a627fb06319b5a2d3b95 +https://conda.anaconda.org/conda-forge/noarch/idna-2.10-pyh9f0ad1d_0.tar.bz2#f95a12b4f435aae6680fe55ae2eb1b06 +https://conda.anaconda.org/conda-forge/noarch/imagesize-1.2.0-py_0.tar.bz2#5879bd2c4b399a5072468e5fe587bf1b +https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.3.0-pyh9f0ad1d_0.tar.bz2#e4a33192da1a6dc4967ba18c6c765945 +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.2-h78a0170_0.tar.bz2#ac0c23e6f3bbb61569781f00b5666f97 +https://conda.anaconda.org/conda-forge/noarch/locket-0.2.0-py_2.tar.bz2#709e8671651c7ec3d1ad07800339ff1d +https://conda.anaconda.org/conda-forge/noarch/nose-1.3.7-py_1006.tar.bz2#382019d5f8e9362ef6f60a8d4e7bce8f +https://conda.anaconda.org/conda-forge/noarch/olefile-0.46-pyh9f0ad1d_1.tar.bz2#0b2e68acc8c78c8cc392b90983481f58 +https://conda.anaconda.org/conda-forge/linux-64/proj-7.2.0-h277dcde_2.tar.bz2#db654ee11298d3463bad67445707654c +https://conda.anaconda.org/conda-forge/noarch/pycparser-2.20-pyh9f0ad1d_2.tar.bz2#aa798d50ffd182a0f6f31478c7f434f6 +https://conda.anaconda.org/conda-forge/noarch/pyparsing-2.4.7-pyh9f0ad1d_0.tar.bz2#626c4f20d5bf06dcec9cf2eaa31725c7 +https://conda.anaconda.org/conda-forge/noarch/pyshp-2.1.3-pyh44b312d_0.tar.bz2#2d1867b980785eb44b8122184d8b42a6 +https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.7-2_cp37m.tar.bz2#afff88bf9a7048da740c70aeb8cdbb82 +https://conda.anaconda.org/conda-forge/noarch/pytz-2021.1-pyhd8ed1ab_0.tar.bz2#3af2e9424d5eb0063824a3f9b850d411 +https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 +https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.1.0-pyhd8ed1ab_0.tar.bz2#f1d64c0cf0eedf655a96ccdc1573c05a +https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.2-py_0.tar.bz2#20b2eaeaeea4ef9a9a0d99770620fd09 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.2-py_0.tar.bz2#68e01cac9d38d0e717cd5c87bc3d2cc9 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.0-pyhd8ed1ab_0.tar.bz2#77dad82eb9c8c1525ff7953e0756d708 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-py_0.tar.bz2#67cd9d9c0382d37479b4d306c369a2d4 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.tar.bz2#d01180388e6d1838c3e1ad029590aa7a +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.5-pyhd8ed1ab_0.tar.bz2#60e630285f44af05767dcb7f473ee03f +https://conda.anaconda.org/conda-forge/noarch/tblib-1.7.0-pyhd8ed1ab_0.tar.bz2#3d4afc31302aa7be471feb6be048ed76 +https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 +https://conda.anaconda.org/conda-forge/noarch/toolz-0.11.1-py_0.tar.bz2#d1e66b58cb00b3817ad9f05eec098c00 +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-3.10.0.0-pyha770c72_0.tar.bz2#67c0cba6533b641f28946d7c16f361c8 +https://conda.anaconda.org/conda-forge/noarch/wheel-0.36.2-pyhd3deb0d_0.tar.bz2#768bfbe026426d0e76b377997d1f2b98 +https://conda.anaconda.org/conda-forge/noarch/zipp-3.4.1-pyhd8ed1ab_0.tar.bz2#a4fa30eb74a326092b3d8078b1f1aae1 +https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py37h89c1867_1002.tar.bz2#cf3aeeb80dbd517761019a8edcd5b108 
+https://conda.anaconda.org/conda-forge/noarch/babel-2.9.1-pyh44b312d_0.tar.bz2#74136ed39bfea0832d338df1e58d013e +https://conda.anaconda.org/conda-forge/linux-64/certifi-2021.5.30-py37h89c1867_0.tar.bz2#105f18ae8597a5f4d4e3188bcb06c796 +https://conda.anaconda.org/conda-forge/linux-64/cffi-1.14.5-py37hc58025e_0.tar.bz2#e05f1fad0c52c21b6b92778d31f89cd0 +https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.0-pyhd8ed1ab_0.tar.bz2#a739adbf102868f675bf70601e0af7ea +https://conda.anaconda.org/conda-forge/linux-64/chardet-4.0.0-py37h89c1867_1.tar.bz2#f4fbd4721b80f0d6b53b3a3374914068 +https://conda.anaconda.org/conda-forge/noarch/cycler-0.10.0-py_2.tar.bz2#f6d7c7e6d8f42cbbec7e07a8d879f91c +https://conda.anaconda.org/conda-forge/linux-64/cython-0.29.23-py37hcd2ae1e_1.tar.bz2#c6a51028408bcb1a32dac34fb28b2a2e +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.11.0-py37h5e8e339_3.tar.bz2#2e89a6f3baf5eeb13763f61ea3d0601f +https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h48d8840_2.tar.bz2#eba672c69baf366fdedd1c6f702dbb81 +https://conda.anaconda.org/conda-forge/linux-64/docutils-0.16-py37h89c1867_3.tar.bz2#3da23bcf1d502670cec18fd3a04f409b +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-2.8.1-h83ec7ef_0.tar.bz2#654935b08e8bd4a8cbf6a4253e290c04 +https://conda.anaconda.org/conda-forge/linux-64/importlib-metadata-4.6.0-py37h89c1867_0.tar.bz2#6d600925b3ec1d7bf9517eacfa839bd0 +https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.3.1-py37h2527ec5_1.tar.bz2#61149814e0ea71cb5b44881c65d25f7b +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.8.0-mpi_mpich_hf07302c_2.tar.bz2#d76a3f327eb8e26b5ce6b042ac1abeb3 +https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.0.1-py37h5e8e339_0.tar.bz2#90ad307f6997784664de956e09ec689e +https://conda.anaconda.org/conda-forge/linux-64/mpi4py-3.0.3-py37h1e5cb63_7.tar.bz2#1c0450be22dc0fbffaabab1f415705d5 +https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.2-py37h2527ec5_1.tar.bz2#07952b04eee02d873daa311a35b27454 +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.21.0-py37h038b26d_0.tar.bz2#bfafd109a1e559dcadd211cd6bc3d298 +https://conda.anaconda.org/conda-forge/noarch/packaging-20.9-pyh44b312d_0.tar.bz2#be69a38e912054a62dc82cc3c7711a64 +https://conda.anaconda.org/conda-forge/noarch/partd-1.2.0-pyhd8ed1ab_0.tar.bz2#0c32f563d7f22e3a34c95cad8cc95651 +https://conda.anaconda.org/conda-forge/linux-64/pillow-6.2.2-py37h718be6c_0.tar.bz2#ecac4e308b87ff93d44ea5e56ab39084 +https://conda.anaconda.org/conda-forge/noarch/pockets-0.9.1-py_0.tar.bz2#1b52f0c42e8077e5a33e00fe72269364 +https://conda.anaconda.org/conda-forge/linux-64/psutil-5.8.0-py37h5e8e339_1.tar.bz2#2923250371b05e798f3732531cdb5300 +https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-4.19.18-py37hcd2ae1e_7.tar.bz2#f94e01aa4abd458b556d68fdb5f19b99 +https://conda.anaconda.org/conda-forge/linux-64/pysocks-1.7.1-py37h89c1867_3.tar.bz2#bd069d59ee91a2e26552cd7bb4c64032 +https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.1-py_0.tar.bz2#0d0150ed9c2d25817f5324108d3f7571 +https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-2.0.2-py37h5e8e339_0.tar.bz2#3f1e633378cd3c8b1ce13b3f2f5eadd7 +https://conda.anaconda.org/conda-forge/linux-64/pyyaml-5.4.1-py37h5e8e339_0.tar.bz2#090550b9425fe9a87dc1ec7fde201633 +https://conda.anaconda.org/conda-forge/linux-64/tornado-6.1-py37h5e8e339_1.tar.bz2#92449128c4639feae48d731ef2186099 +https://conda.anaconda.org/conda-forge/noarch/zict-2.0.0-py_0.tar.bz2#4750152be22f24d695b3004c5e1712d3 
+https://conda.anaconda.org/conda-forge/linux-64/brotlipy-0.7.0-py37h5e8e339_1001.tar.bz2#871eed4ba322e7b3f200956a096b34e7 +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.2.1-py37h161383b_1.tar.bz2#314ca8b00ed742f8c46a6cc68d84d90f +https://conda.anaconda.org/conda-forge/linux-64/click-8.0.1-py37h89c1867_0.tar.bz2#bb1ad97b5d8626f662b753f620c3c913 +https://conda.anaconda.org/conda-forge/linux-64/cryptography-3.4.7-py37h5d9358c_0.tar.bz2#d811fb6a96ae0cf8c0a17457a8e67ff4 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2021.6.2-pyhd8ed1ab_0.tar.bz2#a5a365e004f7cb59d652254800cc40b7 +https://conda.anaconda.org/conda-forge/linux-64/editdistance-s-1.0.0-py37h2527ec5_1.tar.bz2#100918f43247cedad74f2cf8dcbda5bc +https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-4.6.0-hd8ed1ab_0.tar.bz2#bb345f822c508e2bc5138c975667256c +https://conda.anaconda.org/conda-forge/noarch/jinja2-3.0.1-pyhd8ed1ab_0.tar.bz2#c647e77921fd3e245cdcc5b2d451a0f8 +https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py37h902c9e0_1005.tar.bz2#40db532422636dd1e980154114486a00 +https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.5.3-mpi_mpich_h196b126_4.tar.bz2#e058f42a78ea8c965cf7335e28143c59 +https://conda.anaconda.org/conda-forge/linux-64/pandas-1.2.5-py37h219a48f_0.tar.bz2#729b59cb1fe712a4251aba132b087eb6 +https://conda.anaconda.org/conda-forge/linux-64/pango-1.48.5-hb8ff022_0.tar.bz2#f4e263c4dfa15b6a97349782793d1ee7 +https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.1.1-py37h6f94858_1004.tar.bz2#42b37830a63405589fef3d13db505e7d +https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.1.1-py37h902c9e0_3.tar.bz2#104648a5a091a493046a62704eef5c49 +https://conda.anaconda.org/conda-forge/linux-64/qt-5.12.9-hda022c4_4.tar.bz2#afebab1f5049d66baaaec67d9ce893f0 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.7.0-py37h29e03ee_0.tar.bz2#685172ce967c6877e22c1a8907366267 +https://conda.anaconda.org/conda-forge/linux-64/setuptools-49.6.0-py37h89c1867_3.tar.bz2#928c178bf6805b8ab71fabaa620e0234 +https://conda.anaconda.org/conda-forge/linux-64/shapely-1.7.1-py37h2d1e849_5.tar.bz2#451beb59aca4c165e68fbe8be3a37149 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-napoleon-0.7-py_0.tar.bz2#0bc25ff6f2e34af63ded59692df5f749 +https://conda.anaconda.org/conda-forge/linux-64/virtualenv-20.4.7-py37h89c1867_0.tar.bz2#50087f16f1a71581a0327956c80debc2 +https://conda.anaconda.org/conda-forge/linux-64/asv-0.4.2-py37hcd2ae1e_2.tar.bz2#a539a23d322e3976dda4af86e59b31ce +https://conda.anaconda.org/conda-forge/linux-64/bokeh-2.1.1-py37hc8dfbb8_0.tar.bz2#0927f1a093279ba797f014c5e484a58f +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.0.0-py37h6f94858_0.tar.bz2#cb9e2ae2948058a7c7b249e3a326692e +https://conda.anaconda.org/conda-forge/linux-64/distributed-2021.6.2-py37h89c1867_0.tar.bz2#c92280f3760f096f164808d8d745d0b1 +https://conda.anaconda.org/conda-forge/linux-64/esmf-8.1.1-mpi_mpich_h3dcaa78_100.tar.bz2#5b4bab1017226f2c03ba0fe02b783316 +https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h539f30e_1.tar.bz2#606777b4da3664d5c9415f5f165349fd +https://conda.anaconda.org/conda-forge/noarch/identify-2.2.10-pyhd8ed1ab_0.tar.bz2#1f9cd027f471e98e21d9740472b18096 +https://conda.anaconda.org/conda-forge/noarch/imagehash-4.2.0-pyhd8ed1ab_0.tar.bz2#e5a77472ae964f2835fce16355bbfe64 +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.50.7-hc3c00ef_0.tar.bz2#63fb96444e336b3d937921223dd9a481 
+https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.4.2-py37hdd32ed1_0.tar.bz2#ee755b80aae171058a46c5d7badd08ff +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.5.7-nompi_py37h946d57d_100.tar.bz2#217487caeb2c4cecb25f86d99cbe53b6 +https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.6.0-pyhd8ed1ab_0.tar.bz2#0941325bf48969e2b3b19d0951740950 +https://conda.anaconda.org/conda-forge/noarch/pip-21.1.3-pyhd8ed1ab_0.tar.bz2#231bd0af116f55ca4d17ea0869415fdf +https://conda.anaconda.org/conda-forge/noarch/pygments-2.9.0-pyhd8ed1ab_0.tar.bz2#a2d9bba43c9b80a42b0ccb9afd7223c2 +https://conda.anaconda.org/conda-forge/noarch/pyopenssl-20.0.1-pyhd8ed1ab_0.tar.bz2#92371c25994d0f5d28a01c1fb75ebf86 +https://conda.anaconda.org/conda-forge/linux-64/pyqt-impl-5.12.3-py37he336c9b_7.tar.bz2#303251d6f2b9e60a0cd79480cf8507d2 +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.19.0.post1-py37h0c48da3_0.tar.bz2#44add1e9550c8caf69ce64561ce73035 +https://conda.anaconda.org/conda-forge/noarch/dask-2021.6.2-pyhd8ed1ab_0.tar.bz2#138fd8d4293eba5dcfe8448ec54f09f2 +https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.1.1-mpi_mpich_py37hf719a8e_100.tar.bz2#d608536dd44b60da923950c60619583d +https://conda.anaconda.org/conda-forge/linux-64/graphviz-2.47.3-h85b4f2f_0.tar.bz2#099cc43ac1c5bcce50318a9fc14a1d49 +https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.2.0-pyhd8ed1ab_2.tar.bz2#ca7d092db9ebbd9a0102710e8d4b4a28 +https://conda.anaconda.org/conda-forge/linux-64/pre-commit-2.13.0-py37h89c1867_0.tar.bz2#a2ddf76626c4e7481f106fa08d5d77c5 +https://conda.anaconda.org/conda-forge/linux-64/pyqtchart-5.12-py37he336c9b_7.tar.bz2#2b1959f3a87b5ad66690340ef921323c +https://conda.anaconda.org/conda-forge/linux-64/pyqtwebengine-5.12.1-py37he336c9b_7.tar.bz2#15f5cbcafb4889bb41da2a0a0e338f2a +https://conda.anaconda.org/conda-forge/noarch/pyugrid-0.3.1-py_2.tar.bz2#7d7361886fbcf2be663fd185bf6d244d +https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.6-pyhd8ed1ab_0.tar.bz2#dea5b6d93cfbfbc2a253168ad05b3f89 +https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.12.3-py37h89c1867_7.tar.bz2#1754ec587a9ac26e9507fea7eb6bebc2 +https://conda.anaconda.org/conda-forge/noarch/requests-2.25.1-pyhd3deb0d_0.tar.bz2#ae687aba31a1c400192a86a2e993ffdc +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.4.2-py37h89c1867_0.tar.bz2#581de64cb6a7577b162e329efbcf1e4c +https://conda.anaconda.org/conda-forge/noarch/sphinx-3.5.4-pyh44b312d_0.tar.bz2#0ebc444f001f73c4f6de01057b0be392 +https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.3.3-pyhd8ed1ab_0.tar.bz2#b066335fac136c776b7a441e35c1fcb2 +https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.9.0-pyhd8ed1ab_0.tar.bz2#5ef222a3e1b5904742e376e05046692b +https://conda.anaconda.org/conda-forge/noarch/sphinx-panels-0.5.2-pyhd3deb0d_0.tar.bz2#1a871a63c4be1bd47a7aa48b7417a426 +https://conda.anaconda.org/conda-forge/noarch/sphinx_rtd_theme-0.5.2-pyhd8ed1ab_1.tar.bz2#7434e891fc767cb0d39d90751720c8ec diff --git a/requirements/ci/nox.lock/py38-linux-64.lock b/requirements/ci/nox.lock/py38-linux-64.lock new file mode 100644 index 0000000000..55ef7d7723 --- /dev/null +++ b/requirements/ci/nox.lock/py38-linux-64.lock @@ -0,0 +1,225 @@ +# platform: linux-64 +# env_hash: fbb4556337a9f497fb9d021a1e2d2e8eae9f52dfc1d7859ee0f63d66e1842b0c +@EXPLICIT +https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 
+https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2021.5.30-ha878542_0.tar.bz2#6a777890e94194dc94a29a76d2a7e721 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb +https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2#19410c3df09dfb12d1206132a1d357c5 +https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.35.1-hea4e1c9_2.tar.bz2#83610dba766a186bdc7a116053b782a4 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-9.3.0-hff62375_19.tar.bz2#c2d8da3cb171e4aa642d20c6e4e42a04 +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-9.3.0-h6de172a_19.tar.bz2#cd9a24a8dde03ec0cf0e603b0bea85a1 +https://conda.anaconda.org/conda-forge/linux-64/mpi-1.0-mpich.tar.bz2#c1fcff3417b5a22bbc4cf6e8c23648cf +https://conda.anaconda.org/conda-forge/linux-64/mysql-common-8.0.25-ha770c72_2.tar.bz2#b1ba065c6d2b9468035472a9d63e5b08 +https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-9.3.0-hff62375_19.tar.bz2#aea379bd68fdcdf9499fa1453f852ac1 +https://conda.anaconda.org/conda-forge/linux-64/libgomp-9.3.0-h2828fa1_19.tar.bz2#ab0a307912033126da02507b59e79ec9 +https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-1_gnu.tar.bz2#561e277319a41d4f24f5c05a9ef63c04 +https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-9.3.0-h2828fa1_19.tar.bz2#9d5cdfc51476ee4dcdd96ed2dca3f943 +https://conda.anaconda.org/conda-forge/linux-64/alsa-lib-1.2.3-h516909a_0.tar.bz2#1378b88874f42ac31b2f8e4f6975cb7b +https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2#a1fd65c7ccbf10880423d82bca54eb54 +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.17.1-h7f98852_1.tar.bz2#ed1dc233ed5e3eaa9bfbaac64d130c5e +https://conda.anaconda.org/conda-forge/linux-64/expat-2.4.1-h9c3ff4c_0.tar.bz2#16054ef3cb3ec5d8d29d08772662f65d +https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 +https://conda.anaconda.org/conda-forge/linux-64/geos-3.9.1-h9c3ff4c_2.tar.bz2#b9a6d9422aed3ad84ec6ccee9bfcaa0f +https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h36c2ea0_2.tar.bz2#626e68ae9cc5912d6adb79d318cf962d +https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h58526e2_1001.tar.bz2#8c54672728e8ec6aa6db90cf2806d220 +https://conda.anaconda.org/conda-forge/linux-64/icu-68.1-h58526e2_0.tar.bz2#fc7a4271dc2a7f4fd78cd63695baf7c3 +https://conda.anaconda.org/conda-forge/linux-64/jbig-2.1-h7f98852_2003.tar.bz2#1aa0cee79792fa97b7ff4545110b60bf +https://conda.anaconda.org/conda-forge/linux-64/jpeg-9d-h36c2ea0_0.tar.bz2#ea02ce6037dbe81803ae6123e5ba1568 +https://conda.anaconda.org/conda-forge/linux-64/lerc-2.2.1-h9c3ff4c_0.tar.bz2#ea833dcaeb9e7ac4fac521f1a7abec82 +https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.7-h7f98852_5.tar.bz2#10e242842cd30c59c12d79371dc0f583 +https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2#6f8720dff19e17ce5d48cfe7f3d2f0a3 
+https://conda.anaconda.org/conda-forge/linux-64/libffi-3.3-h58526e2_2.tar.bz2#665369991d8dd290ac5ee92fce3e6bf5 +https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.16-h516909a_0.tar.bz2#5c0f338a513a2943c659ae619fca9211 +https://conda.anaconda.org/conda-forge/linux-64/libmo_unpack-3.1.2-hf484d3e_1001.tar.bz2#95f32a6a5a666d33886ca5627239f03d +https://conda.anaconda.org/conda-forge/linux-64/libogg-1.3.4-h7f98852_1.tar.bz2#6e8cc2173440d77708196c5b93771680 +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.15-pthreads_h8fe5266_1.tar.bz2#bb5527a16584426a897f22643d9a36a6 +https://conda.anaconda.org/conda-forge/linux-64/libopus-1.3.1-h7f98852_1.tar.bz2#15345e56d527b330e1cacbdf58676e8f +https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.6-h58526e2_1007.tar.bz2#7f6569a0c2f27acb8fc90600b382e544 +https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.32.1-h7f98852_1000.tar.bz2#772d69f030955d9646d3d0eaf21d859d +https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.2.0-h7f98852_2.tar.bz2#fb63a035a3b552c88a30d84b89ebf4c4 +https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.3-h9c3ff4c_0.tar.bz2#4eb64ee0d5cd43096ffcf843c76b05d4 +https://conda.anaconda.org/conda-forge/linux-64/mpich-3.4.2-h846660c_100.tar.bz2#0868d02349fc7e128d4bdc515b58dd7e +https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.2-h58526e2_4.tar.bz2#509f2a21c4a09214cd737a480dfd80c9 +https://conda.anaconda.org/conda-forge/linux-64/nspr-4.30-h9c3ff4c_0.tar.bz2#e6dc1f8f6e0bcebe8e3d8a5bca258dbe +https://conda.anaconda.org/conda-forge/linux-64/openssl-1.1.1k-h7f98852_0.tar.bz2#07fae2cb088379c8441e0f3ffa1f4025 +https://conda.anaconda.org/conda-forge/linux-64/pcre-8.45-h9c3ff4c_0.tar.bz2#c05d1820a6d34ff07aaaab7a9b7eddaa +https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2#660e72c82f2e75a6b3fe6a6e75c79f19 +https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 +https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a +https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.0.10-h7f98852_0.tar.bz2#d6b0b50b49eccfe0be0373be628be0f3 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.9-h7f98852_0.tar.bz2#bf6f803a544f26ebbdc3bfff272eb179 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2#be93aabceefa2fac576e971aef407908 +https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2#06feff3d2634e3097ce2fe681474b534 +https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h7f98852_1002.tar.bz2#1e15f6ad85a7d743a2ac68dae6c82b98 +https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2#b4a4381d54784606820704f7b5f05a15 +https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.0-h7f98852_3.tar.bz2#52402c791f35e414e704b7a113f99605 +https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.5-h516909a_1.tar.bz2#33f601066901f3e1a85af3522a8113f9 +https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h516909a_0.tar.bz2#03a530e925414902547cf48da7756db8 +https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.11-h516909a_1010.tar.bz2#339cc5584e6d26bc73a875ba900028c3 +https://conda.anaconda.org/conda-forge/linux-64/gettext-0.19.8.1-h0b5b191_1005.tar.bz2#ff6f69b593a9e74c0e6b61908ac513fa +https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h10796ff_3.tar.bz2#21a8d66dc17f065023b33145c42652fe 
+https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-9_openblas.tar.bz2#5f08755e98b2a43ca68124e629a5a0cb +https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 +https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.10-hcdb4288_3.tar.bz2#d8f51405997093ff1799ded7650439c4 +https://conda.anaconda.org/conda-forge/linux-64/libllvm11-11.1.0-hf817b99_2.tar.bz2#646fa2f7c60b69ee8f918668e9c2fd31 +https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.43.0-h812cca2_0.tar.bz2#1867d1e9658596b3fac8847a7702eef4 +https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.37-h21135ba_2.tar.bz2#b6acf807307d033d4b7e758b4f44b036 +https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.9.0-ha56f1ee_6.tar.bz2#f0dfb86444df325e599dbc3f4c0a3f5b +https://conda.anaconda.org/conda-forge/linux-64/libvorbis-1.3.7-h9c3ff4c_0.tar.bz2#309dec04b70a3cc0f1e84a4013683bc0 +https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.13-h7f98852_1003.tar.bz2#a9371e9e40aded194dcba1447606c9a1 +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.9.12-h72842e0_0.tar.bz2#bd14fdf5b9ee5568056a40a6a2f41866 +https://conda.anaconda.org/conda-forge/linux-64/libzip-1.8.0-h4de3113_0.tar.bz2#2d1b63c574f3e11157a07313e58ba7af +https://conda.anaconda.org/conda-forge/linux-64/readline-8.1-h46c0cb4_0.tar.bz2#5788de3c8d7a7d64ac56c784c4ef48e6 +https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.10-h21135ba_1.tar.bz2#c647f70aa7e3d4cc4e029cc1c9a99953 +https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.27.27-h975c496_1.tar.bz2#e663bd5dbc8cc4c1647d9f51cf25872c +https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.3-hd9c2040_1000.tar.bz2#9e856f78d5c80d5a78f61e72d1d473a3 +https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.0-ha95c52a_0.tar.bz2#b56f94865e2de36abf054e7bfa499034 +https://conda.anaconda.org/conda-forge/linux-64/freetype-2.10.4-h0708190_1.tar.bz2#4a06f2ac2e5bfae7b6b245171c3f07aa +https://conda.anaconda.org/conda-forge/linux-64/krb5-1.19.1-hcc1bbae_0.tar.bz2#59b0695a515a6c54d45463dbf208ae38 +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-9_openblas.tar.bz2#edee85b4f83376ceae81e0975b8bffa2 +https://conda.anaconda.org/conda-forge/linux-64/libclang-11.1.0-default_ha53f305_1.tar.bz2#b9b71585ca4fcb5d442c5a9df5dd7e98 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.68.3-h3e27bee_0.tar.bz2#99416a3287216de097d503b827ad0bde +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-9_openblas.tar.bz2#572d84ab07962986f6dd8e4637a475ca +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.3.0-hf544144_1.tar.bz2#a65a4158716bd7d95bfa69bcfd83081c +https://conda.anaconda.org/conda-forge/linux-64/libxkbcommon-1.0.3-he3ba5ed_0.tar.bz2#f9dbabc7e01c459ed7a1d1d64b206e9b +https://conda.anaconda.org/conda-forge/linux-64/mysql-libs-8.0.25-hfa10184_2.tar.bz2#5a35fdd2da4c2d5fdf20575d39c232e5 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.36.0-h9cd32fc_0.tar.bz2#d5bbac924cbda57469f43448d5236a50 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.7.2-h7f98852_0.tar.bz2#12a61e640b8894504326aadafccbb790 +https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.36.0-h3371d22_4.tar.bz2#661e1ed5d92552785d9f8c781ce68685 +https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.13.1-hba837de_1005.tar.bz2#fd3611672eb91bc9d24fd6fb970037eb +https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.6-h04a7f16_0.tar.bz2#b24a1e18325a6e8f8b6b4a2ec5860ce2 
+https://conda.anaconda.org/conda-forge/linux-64/glib-tools-2.68.3-h9c3ff4c_0.tar.bz2#2e9275303dd09a2e245faf31770a1416 +https://conda.anaconda.org/conda-forge/linux-64/gstreamer-1.18.4-h76c114f_2.tar.bz2#5db765d4974fa89f64c1544eb2a552cb +https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h64030ff_2.tar.bz2#112eb9b5b93f0c02e59aea4fd1967363 +https://conda.anaconda.org/conda-forge/linux-64/libcurl-7.77.0-h2574ce0_0.tar.bz2#05cf8dca8408b5f1ffcc5e2d5a7c5da2 +https://conda.anaconda.org/conda-forge/linux-64/libpq-13.3-hd57d9b9_0.tar.bz2#66ef2cacc483205b7d303f7b02601c3b +https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.2.0-h3452ae3_0.tar.bz2#8f4e19a8988c38feec7db41bcd0bf0d0 +https://conda.anaconda.org/conda-forge/linux-64/nss-3.67-hb5efdd6_0.tar.bz2#3f2a4bc7d5fded1327ff1b8c61faae53 +https://conda.anaconda.org/conda-forge/linux-64/python-3.8.10-h49503c6_1_cpython.tar.bz2#69f7d6ef1f00c3a109b1b06279e6d6a9 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h7f98852_1.tar.bz2#536cc5db4d0a3ba0630541aec064b5e4 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.10-h7f98852_1003.tar.bz2#f59c1242cc1dd93e72c2ee2b360979eb +https://conda.anaconda.org/conda-forge/noarch/alabaster-0.7.12-py_0.tar.bz2#2489a97287f90176ecdc3ca982b4b0a0 +https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2#5f095bc6454094e96f146491fd03633b +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-h6cf1ce9_1008.tar.bz2#a43fb47d15e116f8be4be7e6b17ab59f +https://conda.anaconda.org/conda-forge/noarch/cloudpickle-1.6.0-py_0.tar.bz2#76d764d8881719e305f6fa368dc2b65e +https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.4-pyh9f0ad1d_0.tar.bz2#c08b4c1326b880ed44f3ffb04803332f +https://conda.anaconda.org/conda-forge/linux-64/curl-7.77.0-hea6ffbf_0.tar.bz2#7d1168349d6fba67ae1fdf61970b83e1 +https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.2-pyhd8ed1ab_0.tar.bz2#ae8b866c376568b0342ae2c9b68f1e65 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.0.12-pyh9f0ad1d_0.tar.bz2#7544ed05bbbe9bb687bc9bcbe4d6cb46 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2021.6.1-pyhd8ed1ab_0.tar.bz2#b8dca3cd859c8a849042af6db1cbedca +https://conda.anaconda.org/conda-forge/linux-64/glib-2.68.3-h9c3ff4c_0.tar.bz2#90e989058c8b42e3ddee1560c534313b +https://conda.anaconda.org/conda-forge/linux-64/gst-plugins-base-1.18.4-hf529b03_2.tar.bz2#526fadaa13ec264cb919436953bc2766 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.10.6-mpi_mpich_h996c276_1014.tar.bz2#6af2e2e4dfb0ef36c35042cd69a1599d +https://conda.anaconda.org/conda-forge/noarch/heapdict-1.0.1-py_0.tar.bz2#77242bfb1e74a627fb06319b5a2d3b95 +https://conda.anaconda.org/conda-forge/noarch/idna-2.10-pyh9f0ad1d_0.tar.bz2#f95a12b4f435aae6680fe55ae2eb1b06 +https://conda.anaconda.org/conda-forge/noarch/imagesize-1.2.0-py_0.tar.bz2#5879bd2c4b399a5072468e5fe587bf1b +https://conda.anaconda.org/conda-forge/noarch/iris-sample-data-2.3.0-pyh9f0ad1d_0.tar.bz2#e4a33192da1a6dc4967ba18c6c765945 +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.2-h78a0170_0.tar.bz2#ac0c23e6f3bbb61569781f00b5666f97 +https://conda.anaconda.org/conda-forge/noarch/locket-0.2.0-py_2.tar.bz2#709e8671651c7ec3d1ad07800339ff1d +https://conda.anaconda.org/conda-forge/noarch/nose-1.3.7-py_1006.tar.bz2#382019d5f8e9362ef6f60a8d4e7bce8f +https://conda.anaconda.org/conda-forge/noarch/olefile-0.46-pyh9f0ad1d_1.tar.bz2#0b2e68acc8c78c8cc392b90983481f58 
+https://conda.anaconda.org/conda-forge/linux-64/proj-7.2.0-h277dcde_2.tar.bz2#db654ee11298d3463bad67445707654c +https://conda.anaconda.org/conda-forge/noarch/pycparser-2.20-pyh9f0ad1d_2.tar.bz2#aa798d50ffd182a0f6f31478c7f434f6 +https://conda.anaconda.org/conda-forge/noarch/pyparsing-2.4.7-pyh9f0ad1d_0.tar.bz2#626c4f20d5bf06dcec9cf2eaa31725c7 +https://conda.anaconda.org/conda-forge/noarch/pyshp-2.1.3-pyh44b312d_0.tar.bz2#2d1867b980785eb44b8122184d8b42a6 +https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.8-2_cp38.tar.bz2#bfbb29d517281e78ac53e48d21e6e860 +https://conda.anaconda.org/conda-forge/noarch/pytz-2021.1-pyhd8ed1ab_0.tar.bz2#3af2e9424d5eb0063824a3f9b850d411 +https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 +https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.1.0-pyhd8ed1ab_0.tar.bz2#f1d64c0cf0eedf655a96ccdc1573c05a +https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-1.0.2-py_0.tar.bz2#20b2eaeaeea4ef9a9a0d99770620fd09 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-1.0.2-py_0.tar.bz2#68e01cac9d38d0e717cd5c87bc3d2cc9 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.0.0-pyhd8ed1ab_0.tar.bz2#77dad82eb9c8c1525ff7953e0756d708 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-py_0.tar.bz2#67cd9d9c0382d37479b4d306c369a2d4 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-1.0.3-py_0.tar.bz2#d01180388e6d1838c3e1ad029590aa7a +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.5-pyhd8ed1ab_0.tar.bz2#60e630285f44af05767dcb7f473ee03f +https://conda.anaconda.org/conda-forge/noarch/tblib-1.7.0-pyhd8ed1ab_0.tar.bz2#3d4afc31302aa7be471feb6be048ed76 +https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 +https://conda.anaconda.org/conda-forge/noarch/toolz-0.11.1-py_0.tar.bz2#d1e66b58cb00b3817ad9f05eec098c00 +https://conda.anaconda.org/conda-forge/noarch/wheel-0.36.2-pyhd3deb0d_0.tar.bz2#768bfbe026426d0e76b377997d1f2b98 +https://conda.anaconda.org/conda-forge/linux-64/antlr-python-runtime-4.7.2-py38h578d9bd_1002.tar.bz2#2b2207e2c8a05fc0bc5b62fc32c355e6 +https://conda.anaconda.org/conda-forge/noarch/babel-2.9.1-pyh44b312d_0.tar.bz2#74136ed39bfea0832d338df1e58d013e +https://conda.anaconda.org/conda-forge/linux-64/certifi-2021.5.30-py38h578d9bd_0.tar.bz2#a2e14464711f8e76010cd7e0c49bc4ae +https://conda.anaconda.org/conda-forge/linux-64/cffi-1.14.5-py38ha65f79e_0.tar.bz2#386057f231a571b75bfa7307c9acd5f6 +https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.0-pyhd8ed1ab_0.tar.bz2#a739adbf102868f675bf70601e0af7ea +https://conda.anaconda.org/conda-forge/linux-64/chardet-4.0.0-py38h578d9bd_1.tar.bz2#9294a5e2c7545a2f67ac348aadd53344 +https://conda.anaconda.org/conda-forge/linux-64/click-8.0.1-py38h578d9bd_0.tar.bz2#45426acde32f0ddd94dcee3478fd13e3 +https://conda.anaconda.org/conda-forge/noarch/cycler-0.10.0-py_2.tar.bz2#f6d7c7e6d8f42cbbec7e07a8d879f91c +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.11.0-py38h497a2fe_3.tar.bz2#45568bae22c3825f22b631101ecbad35 +https://conda.anaconda.org/conda-forge/linux-64/dbus-1.13.6-h48d8840_2.tar.bz2#eba672c69baf366fdedd1c6f702dbb81 +https://conda.anaconda.org/conda-forge/linux-64/docutils-0.15.2-py38h578d9bd_2.tar.bz2#a62e348e72a6122a17d2146d8c5eb4e1 
+https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-2.8.1-h83ec7ef_0.tar.bz2#654935b08e8bd4a8cbf6a4253e290c04 +https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.3.1-py38h1fd1430_1.tar.bz2#01488c80daae318ed5c17e7bb12af64e +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.8.0-mpi_mpich_hf07302c_2.tar.bz2#d76a3f327eb8e26b5ce6b042ac1abeb3 +https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.0.1-py38h497a2fe_0.tar.bz2#d075babffd68330d81b0488a45435698 +https://conda.anaconda.org/conda-forge/linux-64/mpi4py-3.0.3-py38he865349_7.tar.bz2#afbbb1e0ce578e537b2ec82563988417 +https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.0.2-py38h1fd1430_1.tar.bz2#5854c568e0d341313fb0a6487f1c687e +https://conda.anaconda.org/conda-forge/linux-64/numpy-1.21.0-py38h9894fe3_0.tar.bz2#10aebb0451bda480ef14a25657c576f3 +https://conda.anaconda.org/conda-forge/noarch/packaging-20.9-pyh44b312d_0.tar.bz2#be69a38e912054a62dc82cc3c7711a64 +https://conda.anaconda.org/conda-forge/noarch/partd-1.2.0-pyhd8ed1ab_0.tar.bz2#0c32f563d7f22e3a34c95cad8cc95651 +https://conda.anaconda.org/conda-forge/linux-64/pillow-6.2.1-py38hd70f55b_1.tar.bz2#80d719bee2b77a106b199150c0829107 +https://conda.anaconda.org/conda-forge/noarch/pockets-0.9.1-py_0.tar.bz2#1b52f0c42e8077e5a33e00fe72269364 +https://conda.anaconda.org/conda-forge/linux-64/psutil-5.8.0-py38h497a2fe_1.tar.bz2#3c465545aa3cec37f8f1341546677956 +https://conda.anaconda.org/conda-forge/linux-64/pyqt5-sip-4.19.18-py38h709712a_7.tar.bz2#e012838bbbe92f6a458c2584634830f1 +https://conda.anaconda.org/conda-forge/linux-64/pysocks-1.7.1-py38h578d9bd_3.tar.bz2#8284bab4783fd6fdd11b695958945614 +https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.1-py_0.tar.bz2#0d0150ed9c2d25817f5324108d3f7571 +https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-2.0.2-py38h497a2fe_0.tar.bz2#27b37e3f79205080b573442445ed727b +https://conda.anaconda.org/conda-forge/linux-64/pyyaml-5.4.1-py38h497a2fe_0.tar.bz2#36d6e06148013694eb943576cd305f67 +https://conda.anaconda.org/conda-forge/linux-64/tornado-6.1-py38h497a2fe_1.tar.bz2#e772c8383768280af283e814e2126663 +https://conda.anaconda.org/conda-forge/linux-64/virtualenv-20.4.7-py38h578d9bd_0.tar.bz2#24b5e0295c43de15a51afb00f93a41de +https://conda.anaconda.org/conda-forge/noarch/zict-2.0.0-py_0.tar.bz2#4750152be22f24d695b3004c5e1712d3 +https://conda.anaconda.org/conda-forge/linux-64/brotlipy-0.7.0-py38h497a2fe_1001.tar.bz2#56753dd777a6517b34966ddcb39af734 +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.2.1-py38hab2c0dc_1.tar.bz2#777186ded2d850f3eab4ce7131c6c17c +https://conda.anaconda.org/conda-forge/linux-64/cryptography-3.4.7-py38ha5dfef3_0.tar.bz2#a8b014aba670157256dabdc885f71af4 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2021.6.2-pyhd8ed1ab_0.tar.bz2#a5a365e004f7cb59d652254800cc40b7 +https://conda.anaconda.org/conda-forge/linux-64/editdistance-s-1.0.0-py38h1fd1430_1.tar.bz2#03bbd69539712a691b0a43bd4a49976e +https://conda.anaconda.org/conda-forge/noarch/jinja2-3.0.1-pyhd8ed1ab_0.tar.bz2#c647e77921fd3e245cdcc5b2d451a0f8 +https://conda.anaconda.org/conda-forge/linux-64/mo_pack-0.2.0-py38h5c078b8_1005.tar.bz2#d318a411c4cb595d5adb60ec7b4a46f0 +https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.5.3-mpi_mpich_h196b126_4.tar.bz2#e058f42a78ea8c965cf7335e28143c59 +https://conda.anaconda.org/conda-forge/linux-64/pandas-1.2.5-py38h1abd341_0.tar.bz2#b7c0ddb0b4a016268bd915d8fb55693f 
+https://conda.anaconda.org/conda-forge/linux-64/pango-1.48.5-hb8ff022_0.tar.bz2#f4e263c4dfa15b6a97349782793d1ee7 +https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.2.post0-py38hb5d20a5_0.tar.bz2#cc6852249c01884469560082943b689f +https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.1.1-py38h5c078b8_3.tar.bz2#dafeef887e68bd18ec84681747ca0fd5 +https://conda.anaconda.org/conda-forge/linux-64/qt-5.12.9-hda022c4_4.tar.bz2#afebab1f5049d66baaaec67d9ce893f0 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.7.0-py38h7b17777_0.tar.bz2#0b7f0bd8baf6557f140e0f634e90d067 +https://conda.anaconda.org/conda-forge/linux-64/setuptools-49.6.0-py38h578d9bd_3.tar.bz2#59c561cd1be0db9cf1c83f7d7cc74f4d +https://conda.anaconda.org/conda-forge/linux-64/shapely-1.7.1-py38haeee4fe_5.tar.bz2#2e633d8e2257f3c0e465c858ce2ddbc6 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-napoleon-0.7-py_0.tar.bz2#0bc25ff6f2e34af63ded59692df5f749 +https://conda.anaconda.org/conda-forge/linux-64/asv-0.4.2-py38h709712a_2.tar.bz2#4659f315fc42e671606fbcd1b9234f75 +https://conda.anaconda.org/conda-forge/linux-64/bokeh-1.4.0-py38h32f6830_1.tar.bz2#7074fc3ef551c1aa1e10393436de021c +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.0.0-py38hb5d20a5_0.tar.bz2#4de86c142bd3846d95d11b32567ca503 +https://conda.anaconda.org/conda-forge/linux-64/distributed-2021.6.2-py38h578d9bd_0.tar.bz2#87cccb37c2a1e0ec30d3715431dee080 +https://conda.anaconda.org/conda-forge/linux-64/esmf-8.1.1-mpi_mpich_h3dcaa78_100.tar.bz2#5b4bab1017226f2c03ba0fe02b783316 +https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h539f30e_1.tar.bz2#606777b4da3664d5c9415f5f165349fd +https://conda.anaconda.org/conda-forge/noarch/identify-2.2.10-pyhd8ed1ab_0.tar.bz2#1f9cd027f471e98e21d9740472b18096 +https://conda.anaconda.org/conda-forge/noarch/imagehash-4.2.0-pyhd8ed1ab_0.tar.bz2#e5a77472ae964f2835fce16355bbfe64 +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.50.7-hc3c00ef_0.tar.bz2#63fb96444e336b3d937921223dd9a481 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.4.2-py38hcc49a3a_0.tar.bz2#4bfb6818a1fce6d4129fdf121f788505 +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.5.7-nompi_py38h5e9db54_100.tar.bz2#5f86dd7381e37db378068abd7707cd57 +https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.6.0-pyhd8ed1ab_0.tar.bz2#0941325bf48969e2b3b19d0951740950 +https://conda.anaconda.org/conda-forge/noarch/pip-21.1.3-pyhd8ed1ab_0.tar.bz2#231bd0af116f55ca4d17ea0869415fdf +https://conda.anaconda.org/conda-forge/noarch/pygments-2.9.0-pyhd8ed1ab_0.tar.bz2#a2d9bba43c9b80a42b0ccb9afd7223c2 +https://conda.anaconda.org/conda-forge/noarch/pyopenssl-20.0.1-pyhd8ed1ab_0.tar.bz2#92371c25994d0f5d28a01c1fb75ebf86 +https://conda.anaconda.org/conda-forge/linux-64/pyqt-impl-5.12.3-py38h7400c14_7.tar.bz2#8fe28c949b01e3d69c2b357b5abf3916 +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.19.0.post1-py38hc9c980b_0.tar.bz2#65e97172e139d3465895eb07a1fd52f2 +https://conda.anaconda.org/conda-forge/noarch/dask-2021.6.2-pyhd8ed1ab_0.tar.bz2#138fd8d4293eba5dcfe8448ec54f09f2 +https://conda.anaconda.org/conda-forge/linux-64/esmpy-8.1.1-mpi_mpich_py38h7f78e9f_100.tar.bz2#ce0ac0d6f5e6c5e7e0c613b08b3a0960 +https://conda.anaconda.org/conda-forge/linux-64/graphviz-2.47.3-h85b4f2f_0.tar.bz2#099cc43ac1c5bcce50318a9fc14a1d49 +https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.2.0-pyhd8ed1ab_2.tar.bz2#ca7d092db9ebbd9a0102710e8d4b4a28 
+https://conda.anaconda.org/conda-forge/linux-64/pre-commit-2.13.0-py38h578d9bd_0.tar.bz2#1013dff06f574377c64f11efa7e2c016 +https://conda.anaconda.org/conda-forge/linux-64/pyqtchart-5.12-py38h7400c14_7.tar.bz2#3003444b4f41742a33b7afdeb3260cbc +https://conda.anaconda.org/conda-forge/linux-64/pyqtwebengine-5.12.1-py38h7400c14_7.tar.bz2#1c17944e118b314ff4d0bfc05f03a5e1 +https://conda.anaconda.org/conda-forge/noarch/pyugrid-0.3.1-py_2.tar.bz2#7d7361886fbcf2be663fd185bf6d244d +https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.6-pyhd8ed1ab_0.tar.bz2#dea5b6d93cfbfbc2a253168ad05b3f89 +https://conda.anaconda.org/conda-forge/linux-64/pyqt-5.12.3-py38h578d9bd_7.tar.bz2#7166890c160d0441f59973a40b74f6e5 +https://conda.anaconda.org/conda-forge/noarch/requests-2.25.1-pyhd3deb0d_0.tar.bz2#ae687aba31a1c400192a86a2e993ffdc +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-3.4.2-py38h578d9bd_0.tar.bz2#82aa0479b2189ab97f9e70b90d7ec866 +https://conda.anaconda.org/conda-forge/noarch/sphinx-3.5.4-pyh44b312d_0.tar.bz2#0ebc444f001f73c4f6de01057b0be392 +https://conda.anaconda.org/conda-forge/noarch/sphinx-copybutton-0.3.3-pyhd8ed1ab_0.tar.bz2#b066335fac136c776b7a441e35c1fcb2 +https://conda.anaconda.org/conda-forge/noarch/sphinx-gallery-0.9.0-pyhd8ed1ab_0.tar.bz2#5ef222a3e1b5904742e376e05046692b +https://conda.anaconda.org/conda-forge/noarch/sphinx-panels-0.5.2-pyhd3deb0d_0.tar.bz2#1a871a63c4be1bd47a7aa48b7417a426 +https://conda.anaconda.org/conda-forge/noarch/sphinx_rtd_theme-0.5.2-pyhd8ed1ab_1.tar.bz2#7434e891fc767cb0d39d90751720c8ec From 9cb18fa230828f9c0022c5a462b4dc8d11e5f1a8 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 29 Jun 2021 11:12:23 +0100 Subject: [PATCH 49/53] Small fixes to comments. --- lib/iris/fileformats/netcdf.py | 2 +- .../tests/unit/fileformats/nc_load_rules/actions/__init__.py | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/lib/iris/fileformats/netcdf.py b/lib/iris/fileformats/netcdf.py index 4c69a3a002..081456c7d4 100644 --- a/lib/iris/fileformats/netcdf.py +++ b/lib/iris/fileformats/netcdf.py @@ -464,7 +464,7 @@ def _assert_case_specific_facts(engine, cf, cf_group): # of the CF data variable. # Collect varnames of formula-root variables as we go. - # NOTE: use dictionary keys as an 'OrderedDict' + # NOTE: use dictionary keys as an 'OrderedSet' # - see: https://stackoverflow.com/a/53657523/2615050 # This is to ensure that we can handle the resulting facts in a definite # order, as using a 'set' led to indeterminate results. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py index 26f3e7b4bb..4e3d1f946f 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py @@ -62,10 +62,8 @@ class Mixin__nc_load_actions: # "global" test setting : whether to output various debug info debug = False - # whether to perform action in both ways and compare results. @classmethod def setUpClass(cls): - # # Control which testing method we are applying. # Create a temp directory for temp files. cls.temp_dirpath = Path(tempfile.mkdtemp()) From a05f7b6552330fd7c5649f02eb77fe7227990fd0 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 30 Jun 2021 15:24:57 +0100 Subject: [PATCH 50/53] Remove refs to pyke in project config files. 
--- pyproject.toml | 2 -- setup.cfg | 1 - 2 files changed, 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 310bfb05d4..0a672b86bb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,8 +30,6 @@ line_length = 79 profile = "black" extend_skip = [ "_build", - "compiled_krb", - "fc_rules_cf.krb", "generated", "sphinxext", "tools", diff --git a/setup.cfg b/setup.cfg index 73714e0a5f..e7e96fa5e8 100644 --- a/setup.cfg +++ b/setup.cfg @@ -122,7 +122,6 @@ exclude = # .eggs, build, - compiled_krb, docs/src/sphinxext/*, tools/*, # From 794cab5cbc4048694125f599fea86095bcfa0865 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 30 Jun 2021 17:06:20 +0100 Subject: [PATCH 51/53] Review changes for comments: fix, clarify + make consistent. --- .../fileformats/_nc_load_rules/__init__.py | 4 - lib/iris/fileformats/netcdf.py | 2 +- .../nc_load_rules/actions/__init__.py | 4 +- .../actions/test__grid_mappings.py | 167 ++++++++++-------- .../actions/test__time_coords.py | 37 ++-- 5 files changed, 111 insertions(+), 103 deletions(-) diff --git a/lib/iris/fileformats/_nc_load_rules/__init__.py b/lib/iris/fileformats/_nc_load_rules/__init__.py index 4409ace8dc..b102a082df 100644 --- a/lib/iris/fileformats/_nc_load_rules/__init__.py +++ b/lib/iris/fileformats/_nc_load_rules/__init__.py @@ -9,8 +9,4 @@ Interprets CF concepts identified by :mod:`iris.fileformats.cf` to add components into loaded cubes. -For now : the API mimics :class:`pyke.knowledge_engine.engine`. -As this is aiming to replace the old Pyke-based logic rules. -TODO: simplify once the parallel operation with Pyke is no longer required. - """ diff --git a/lib/iris/fileformats/netcdf.py b/lib/iris/fileformats/netcdf.py index 081456c7d4..14dbab8054 100644 --- a/lib/iris/fileformats/netcdf.py +++ b/lib/iris/fileformats/netcdf.py @@ -580,7 +580,7 @@ def _load_cube(engine, cf, cf_var, filename): engine.cube = cube engine.cube_parts = {} engine.requires = {} - engine.rule_triggered = OrderedAddableList() # set() + engine.rule_triggered = OrderedAddableList() engine.filename = filename # Assert all the case-specific facts. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py index 4e3d1f946f..717e5b5c41 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/__init__.py @@ -26,8 +26,8 @@ IN cf : "def _load_cube(engine, cf, cf_var, filename)" WHERE: - engine is a :class:`iris.fileformats._nc_load_rules.engine.Engine` - - cf is a CFReader - - cf_var is a CFDAtaVariable + - cf is a :class:`iris.fileformats.cf.CFReader` + - cf_var is a :class:`iris.fileformats.cf.CFDataVariable` As it's hard to construct a suitable CFReader from scratch, it would seem simpler (for now) to use an ACTUAL FILE. diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py index a4cb774249..7e651998ec 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py @@ -341,6 +341,7 @@ def tearDownClass(cls): def test_basic_latlon(self): # A basic reference example with a lat-long grid. 
+ # # Rules Triggered: # 001 : fc_default # 002 : fc_provides_grid_mapping_(latitude_longitude) @@ -349,9 +350,9 @@ def test_basic_latlon(self): # 005 : fc_build_coordinate_(latitude) # 006 : fc_build_coordinate_(longitude) # Notes: - # grid-mapping: regular latlon - # dim-coords: lat+lon - # coords-build: standard latlon coords (with latlon coord-system) + # * grid-mapping identified : regular latlon + # * dim-coords identified : lat+lon + # * coords built : standard latlon (with latlon coord-system) result = self.run_testcase() self.check_result(result) @@ -373,9 +374,9 @@ def test_bad_gridmapping_nameproperty(self): # 005 : fc_build_coordinate_(latitude)(no-cs) # 006 : fc_build_coordinate_(longitude)(no-cs) # Notes: - # grid-mapping: NONE - # dim-coords: lat+lon - # coords-build: latlon coords NO coord-system + # * grid-mapping identified : NONE (thus, no coord-system) + # * dim-coords identified : lat+lon + # * coords built : lat+lon coords, with NO coord-system result = self.run_testcase(gridmapvar_mappropertyname="mappy") self.check_result(result, cube_no_cs=True) @@ -389,8 +390,7 @@ def test_latlon_bad_gridmapping_varname(self): # 004 : fc_build_coordinate_(latitude)(no-cs) # 005 : fc_build_coordinate_(longitude)(no-cs) # Notes: - # no coord-system - # all the same as test_bad_gridmapping_nameproperty + # * behaviours all the same as 'test_bad_gridmapping_nameproperty' warning = "Missing.*grid mapping variable 'grid'" result = self.run_testcase(warning=warning, gridmapvar_name="grid_2") self.check_result(result, cube_no_cs=True) @@ -406,15 +406,16 @@ def test_latlon_bad_latlon_unit(self): # 005 : fc_build_coordinate_(miscellaneous) # 006 : fc_build_coordinate_(longitude) # Notes: - # grid-mapping: regular latlon - # dim-coords: + # * grid-mapping identified : regular latlon + # * dim-coords identified : # x is regular longitude dim-coord # y is 'default' coord ==> builds as an 'extra' dim-coord - # coords-build: + # * coords built : # x(lon) is regular latlon with coord-system - # y(lat) is a dim-coord, but NO coord-system - # = "fc_provides_coordinate_latitude" does not trigger, because it is - # not a valid latitude coordinate. + # y(lat) is a dim-coord, but with NO coord-system + # * additional : + # "fc_provides_coordinate_latitude" did not trigger, + # because it is not a valid latitude coordinate. result = self.run_testcase(latitude_units="degrees") self.check_result(result, yco_no_cs=True) @@ -422,7 +423,7 @@ def test_mapping_rotated(self): # Test with rotated-latlon grid-mapping # Distinct from both regular-latlon and non-latlon cases, as the # coordinate standard names and units are different. - # (run_testcase/_make_testcase_cdl know how to handle that). + # ('_make_testcase_cdl' and 'check_result' know how to handle that). 
# # Rules Triggered: # 001 : fc_default @@ -432,10 +433,10 @@ def test_mapping_rotated(self): # 005 : fc_build_coordinate_(rotated_latitude)(rotated) # 006 : fc_build_coordinate_(rotated_longitude)(rotated) # Notes: - # grid-mapping: rotated lat-lon - # dim-coords: lat+lon - # coords-build: lat+lon coords ROTATED, with coord-system - # (rotated means different name + units) + # * grid-mapping identified : rotated lat-lon + # * dim-coords identified : lat+lon + # * coords built: lat+lon coords ROTATED, with coord-system + # - "rotated" means that they have a different name + units result = self.run_testcase( mapping_type_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON ) @@ -458,10 +459,9 @@ def test_mapping_rotated(self): # 005 : fc_build_coordinate_(projection_y) # 006 : fc_build_coordinate_(projection_x) # Notes: - # grid-mapping: - # dim-coords: proj-x and -y - # coords-build: proj-x/-y_, with coord-system - + # * grid-mapping identified : + # * dim-coords identified : projection__coordinate + # * coords built : projection__coordinate, with coord-system def test_mapping_albers(self): result = self.run_testcase(mapping_type_name=hh.CF_GRID_MAPPING_ALBERS) self.check_result(result, cube_cstype=ics.AlbersEqualArea) @@ -491,6 +491,10 @@ def test_mapping_mercator(self): self.check_result(result, cube_cstype=ics.Mercator) def test_mapping_mercator__fail_unsupported(self): + # Provide a mercator grid-mapping with a non-unity scale factor, which + # we cannot handle. + # Result : fails to convert into a coord-system, and emits a warning. + # # Rules Triggered: # 001 : fc_default # 002 : fc_provides_grid_mapping_(mercator) --(FAILED check has_supported_mercator_parameters) @@ -499,12 +503,9 @@ def test_mapping_mercator__fail_unsupported(self): # 005 : fc_build_coordinate_(projection_y)(FAILED projected coord with non-projected cs) # 006 : fc_build_coordinate_(projection_x)(FAILED projected coord with non-projected cs) # Notes: - # grid-mapping: NONE - # dim-coords: proj-x and -y - # coords-build: NONE - # = NO coord-system - # = NO dim-coords built (cube has no coords) - # Set a non-unity scale factor, which mercator cannot handle. + # * grid-mapping identified : NONE + # * dim-coords identified : proj-x and -y + # * coords built : NONE (no dim or aux coords: cube has no coords) warning = "not yet supported for Mercator" result = self.run_testcase( warning=warning, @@ -518,6 +519,10 @@ def test_mapping_stereographic(self): self.check_result(result, cube_cstype=ics.Stereographic) def test_mapping_stereographic__fail_unsupported(self): + # As for 'test_mapping_mercator__fail_unsupported', provide a non-unity + # scale factor, which we cannot handle. + # Result : fails to convert into a coord-system, and emits a warning. + # # Rules Triggered: # 001 : fc_default # 002 : fc_provides_grid_mapping_(stereographic) --(FAILED check has_supported_stereographic_parameters) @@ -527,9 +532,6 @@ def test_mapping_stereographic__fail_unsupported(self): # 006 : fc_build_coordinate_(projection_x)(FAILED projected coord with non-projected cs) # Notes: # as for 'mercator__fail_unsupported', above - # = NO dim-coords built (cube has no coords) - # - # Set a non-unity scale factor, which stereo cannot handle. 
warning = "not yet supported for stereographic" result = self.run_testcase( warning=warning, @@ -560,9 +562,10 @@ def test_mapping_unsupported(self): # 004 : fc_provides_coordinate_(projection_x) # 005 : fc_build_coordinate_(projection_y)(FAILED projected coord with non-projected cs) # 006 : fc_build_coordinate_(projection_x)(FAILED projected coord with non-projected cs) - # NOTES: - # - there is no warning for this. - # TODO: perhaps there should be ? + # Notes: + # * NO grid-mapping is identified (or coord-system built) + # * There is no warning for this : it fails silently. + # TODO: perhaps there _should_ be a warning in such cases ? result = self.run_testcase( mapping_type_name=hh.CF_GRID_MAPPING_AZIMUTHAL ) @@ -578,17 +581,31 @@ def test_mapping_undefined(self): # 004 : fc_provides_coordinate_(projection_x) # 005 : fc_build_coordinate_(projection_y)(FAILED projected coord with non-projected cs) # 006 : fc_build_coordinate_(projection_x)(FAILED projected coord with non-projected cs) - # NOTES: - # - there is no warning for this. - # TODO: perhaps there should be ? + # Notes: + # * There is no warning for this : it fails silently. + # TODO: perhaps there _should_ be a warning in such cases ? result = self.run_testcase(mapping_type_name="unknown") self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) # - # Cases where names(+units) of coords don't match the grid-mapping type + # Cases where names(+units) of coords don't match the grid-mapping type. # Effectively, there are 9 possibilities for (latlon/rotated/projected) - # coords against (latlon/rotated/projected/missing) coord-systems. - # N.B. the results are not all the same ... + # coords mismatched to (latlon/rotated/projected/missing) coord-systems. + # + # N.B. the results are not all the same : + # + # 1. when a coord and the grid-mapping have the same 'type', + # i.e. plain-latlon, rotated-latlon or non-latlon, then dim-coords are + # built with a coord-system (as seen previously). + # 2. when there is no grid-mapping, we can build coords of any type, + # but with no coord-system. + # 3. when one of (coord + grid-mapping) is plain-latlon or rotated-latlon, + # and the other is non-latlon (i.e. any other type), + # then we build coords *without* a coord-system + # 4. when one of (coord + grid-mapping) is plain-latlon, and the other is + # rotated-latlon, we don't build coords at all. + # TODO: it's not clear why this needs to behave differently from case + # (3.) : possibly, these two should be made consistent. # def test_mapping__mismatch__latlon_coords_rotated_system(self): @@ -599,9 +616,8 @@ def test_mapping__mismatch__latlon_coords_rotated_system(self): # 004 : fc_provides_coordinate_(longitude) # 005 : fc_build_coordinate_(latitude)(FAILED : latlon coord with rotated cs) # 006 : fc_build_coordinate_(longitude)(FAILED : latlon coord with rotated cs) - # NOTES: - # no build_coord triggers, as it requires the correct mapping type - # so no dim-coords at all in this case + # Notes: + # * coords built : NONE (see above) result = self.run_testcase( mapping_type_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON, xco_name="longitude", @@ -619,10 +635,8 @@ def test_mapping__mismatch__latlon_coords_nonll_system(self): # 004 : fc_provides_coordinate_(longitude) # 005 : fc_build_coordinate_(latitude)(no-cs : discarded projected cs) # 006 : fc_build_coordinate_(longitude)(no-cs : discarded projected cs) - # NOTES: - # build_coord_XXX_cs triggers, requires NO latlon/rotated mapping - # - but a non-ll mapping is 'ok'. 
- # TODO: not really clear why this is right ? + # Notes: + # * coords built : lat + lon, with no coord-system (see above) result = self.run_testcase( mapping_type_name=hh.CF_GRID_MAPPING_ALBERS, xco_name="longitude", @@ -639,9 +653,8 @@ def test_mapping__mismatch__latlon_coords_missing_system(self): # 003 : fc_provides_coordinate_(longitude) # 004 : fc_build_coordinate_(latitude)(no-cs) # 005 : fc_build_coordinate_(longitude)(no-cs) - # NOTES: - # same as nonll, except *NO* grid-mapping is detected, - # - which makes no practical difference + # Notes: + # * coords built : lat + lon, with no coord-system (see above) warning = "Missing.*grid mapping variable 'grid'" result = self.run_testcase( warning=warning, @@ -661,9 +674,8 @@ def test_mapping__mismatch__rotated_coords_latlon_system(self): # 004 : fc_provides_coordinate_(rotated_longitude) # 005 : fc_build_coordinate_(rotated_latitude)(FAILED rotated coord with latlon cs) # 006 : fc_build_coordinate_(rotated_longitude)(FAILED rotated coord with latlon cs) - # NOTES: - # no build_coord triggers : requires NO latlon/rotated mapping - # hence no coords at all + # Notes: + # * coords built : NONE (see above) result = self.run_testcase( xco_name="grid_longitude", xco_units="degrees", @@ -680,10 +692,8 @@ def test_mapping__mismatch__rotated_coords_nonll_system(self): # 004 : fc_provides_coordinate_(rotated_longitude) # 005 : fc_build_coordinate_(rotated_latitude)(rotated no-cs : discarded projected cs) # 006 : fc_build_coordinate_(rotated_longitude)(rotated no-cs : discarded projected cs) - # NOTES: - # this is different from the previous - # build_coord.._nocs triggers : requires NO latlon/rotated mapping - # - which seems odd + inconsistent (with previous) ? + # Notes: + # * coords built : rotated-lat + lon, with no coord-system (see above) # TODO: should this change ?? 
result = self.run_testcase( mapping_type_name=hh.CF_GRID_MAPPING_ALBERS, @@ -701,8 +711,8 @@ def test_mapping__mismatch__rotated_coords_missing_system(self): # 003 : fc_provides_coordinate_(rotated_longitude) # 004 : fc_build_coordinate_(rotated_latitude)(rotated no-cs) # 005 : fc_build_coordinate_(rotated_longitude)(rotated no-cs) - # NOTES: - # as previous, but no grid-mapping (which makes no difference) + # Notes: + # * coords built : rotated lat + lon, with no coord-system (see above) warning = "Missing.*grid mapping variable 'grid'" result = self.run_testcase( warning=warning, @@ -722,8 +732,9 @@ def test_mapping__mismatch__nonll_coords_latlon_system(self): # 004 : fc_default_coordinate_(provide-phase) # 005 : fc_build_coordinate_(miscellaneous) # 006 : fc_build_coordinate_(miscellaneous) - # NOTES: - # dim-coords built as "defaults" : dim-coords, but NO standard name + # Notes: + # * coords built : projection x + y, with no coord-system (see above) + # * the coords build as "default" type : they have no standard-name result = self.run_testcase( xco_name="projection_x", xco_units="m", @@ -742,8 +753,8 @@ def test_mapping__mismatch__nonll_coords_rotated_system(self): # 004 : fc_default_coordinate_(provide-phase) # 005 : fc_build_coordinate_(miscellaneous) # 006 : fc_build_coordinate_(miscellaneous) - # NOTES: - # same as previous __mismatch__nonll_ + # Notes: + # * as previous case '__mismatch__nonll_' result = self.run_testcase( mapping_type_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON, xco_name="projection_x", @@ -762,8 +773,8 @@ def test_mapping__mismatch__nonll_coords_missing_system(self): # 003 : fc_default_coordinate_(provide-phase) # 004 : fc_build_coordinate_(miscellaneous) # 005 : fc_build_coordinate_(miscellaneous) - # NOTES: - # effectively, just like previous 2 __mismatch__nonll_ + # Notes: + # * effectively, just like previous 2 cases warning = "Missing.*grid mapping variable 'grid'" result = self.run_testcase( warning=warning, @@ -801,6 +812,8 @@ def test_aux_lon(self): self.check_result(result, xco_is_aux=True, xco_no_cs=True) def test_aux_lat(self): + # As previous, but with the Y coord. + # # Rules Triggered: # 001 : fc_default # 002 : fc_provides_grid_mapping_(latitude_longitude) @@ -811,21 +824,23 @@ def test_aux_lat(self): self.check_result(result, yco_is_aux=True, yco_no_cs=True) def test_aux_lat_and_lon(self): - # When *both* are aux, the grid-mapping is discarded. - # - as in this case there are then no dim-coords to reference it. + # Make *both* X and Y coords into aux-coords. # # Rules Triggered: # 001 : fc_default # 002 : fc_provides_grid_mapping_(latitude_longitude) # 003 : fc_build_auxiliary_coordinate_longitude # 004 : fc_build_auxiliary_coordinate_latitude + # Notes: + # * a grid-mapping is recognised, but discarded, as in this case + # there are no dim-coords to reference it. result = self.run_testcase(xco_is_dim=False, yco_is_dim=False) self.check_result( result, xco_is_aux=True, yco_is_aux=True, cube_no_cs=True ) def test_aux_lon_rotated(self): - # Same but with rotated-style lat + lon coords. + # Rotated-style lat + lon coords, X is an aux-coord. # # Rules Triggered: # 001 : fc_default @@ -833,6 +848,8 @@ def test_aux_lon_rotated(self): # 003 : fc_provides_coordinate_(rotated_latitude) # 004 : fc_build_coordinate_(rotated_latitude)(rotated) # 005 : fc_build_auxiliary_coordinate_longitude_rotated + # Notes: + # * as the plain-latlon case 'test_aux_lon'. 
result = self.run_testcase( mapping_type_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON, xco_is_dim=False, @@ -840,12 +857,16 @@ def test_aux_lon_rotated(self): self.check_result(result, xco_is_aux=True, xco_no_cs=True) def test_aux_lat_rotated(self): + # Rotated-style lat + lon coords, Y is an aux-coord. + # # Rules Triggered: # 001 : fc_default # 002 : fc_provides_grid_mapping_(rotated_latitude_longitude) # 003 : fc_provides_coordinate_(rotated_longitude) # 004 : fc_build_coordinate_(rotated_longitude)(rotated) # 005 : fc_build_auxiliary_coordinate_latitude_rotated + # Notes: + # * as the plain-latlon case 'test_aux_lat'. result = self.run_testcase( mapping_type_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON, yco_is_dim=False, @@ -863,7 +884,7 @@ def tearDownClass(cls): super().tearDownClass() def test_nondim_lats(self): - # Check what happens when values don't allow a coord to be dim-coord. + # Fix a coord's values so it cannot be a dim-coord. # # Rules Triggered: # 001 : fc_default @@ -872,9 +893,9 @@ def test_nondim_lats(self): # 004 : fc_provides_coordinate_(longitude) # 005 : fc_build_coordinate_(latitude) # 006 : fc_build_coordinate_(longitude) - # NOTES: - # in terms of rule triggers, this is not distinct from a normal case - # - but the latitude is now an aux-coord. + # Notes: + # * in terms of rule triggering, this is not distinct from the + # "normal" case : but latitude is now created as an aux-coord. warning = "must be.* monotonic" result = self.run_testcase(warning=warning, yco_values=[0.0, 0.0]) self.check_result(result, yco_is_aux=True) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py index 9a2e916380..ebe9c0eb46 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py @@ -294,6 +294,7 @@ def test_dimension(self): def test_dimension_in_phenom_coords(self): # Dimension coord also present in phenom:coords. # Strictly wrong but a common error in datafiles : must tolerate. + # # Rules Triggered: # 001 : fc_default # 002 : fc_provides_coordinate_(time[[_period]]) @@ -306,6 +307,7 @@ def test_dim_nonmonotonic(self): # The rule has a special way of treating it as an aux coord # -- even though it doesn't appear in the phenom coords. # ( Done by the build_coord routine, so not really a rules issue). + # # Rules Triggered: # 001 : fc_default # 002 : fc_provides_coordinate_(time[[_period]]) @@ -320,9 +322,10 @@ def test_dim_fails_typeident(self): # 'is_period' test, so the 'provides_coord' rule fails to trigger. # So it is built as a 'miscellaneous' dim-coord. # N.B. this makes *no* practical difference, because a 'misc' dim - # coord is still a dim coord (albeit with bad units). + # coord is still a dim coord (albeit one with bad units). # ( N.B.#2 Not quite the same for lat/lon coords, where coord-specific # 'build' rules always use a fixed standard-name ). + # # Rules Triggered: # 001 : fc_default # 002 : fc_default_coordinate_(provide-phase) @@ -335,6 +338,7 @@ def test_aux(self): # For this, rename both DIMENSIONS, so that the generated coords are # not actually CF coordinates. 
# For a valid case, we must *also* have a ref in phenom:coordinates + # # Rules Triggered: # 001 : fc_default # 002 : fc_build_auxiliary_coordinate_time[[_period]] @@ -349,6 +353,7 @@ def test_aux_not_in_phenom_coords(self): # time/period is installed as an auxiliary coord, # but we DIDN'T list it in phenom:coords -- otherwise as previous. # Should have no result at all. + # # Rules Triggered: # 001 : fc_default result = self.run_testcase( @@ -359,31 +364,17 @@ def test_aux_not_in_phenom_coords(self): self.check_result(result, "missing") def test_aux_fails_typeident(self): - # The coord variable is identified as a CFAuxiliaryCoordinate by cf.py, - # but having the wrong units causes it to fail the 'is_time' or - # 'is_period' test, so the 'provides_coord' rule fails to trigger. - # Rules Triggered: - # 001 : fc_default - # 002 : fc_build_auxiliary_coordinate - # Again, though it builds as a 'miscellaneous' rather than a recognised - # specific coord type, it makes no practical difference. - result = self.run_testcase( - coord_dim_name="dim_renamed", - dimname="dim_renamed", - in_phenomvar_coords=True, - units="1", - ) - self.check_result(result, "aux") - - def test_aux_no_coordsref(self): - # The coord variable is identified as a CFAuxiliaryCoordinate by cf.py, - # but having the wrong units causes it to fail the 'is_time' or - # 'is_period' test. + # We provide a non-dimension coord variable, identified as a + # CFAuxiliaryCoordinate by cf.py, but we also give it the wrong units, + # which causes it to fail both 'is_time' and 'is_period' tests, so the + # 'provides_coord' rule fails to trigger. + # As in 'test_dim_fails_typeident', the coord is built as a + # 'miscellaneous' rather than a specific coord type (recognised from its + # name), but this makes absolutely no practical difference. + # # Rules Triggered: # 001 : fc_default # 002 : fc_build_auxiliary_coordinate - # Again, though it builds as a 'miscellaneous' rather than a reocgnised - # specific coord type, it makes no practical difference. result = self.run_testcase( coord_dim_name="dim_renamed", dimname="dim_renamed", From 97b20aa95041135f40fa813f5b76825dec3af498 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Thu, 1 Jul 2021 11:15:19 +0100 Subject: [PATCH 52/53] Review change: correct test comment. --- .../nc_load_rules/actions/test__time_coords.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py index ebe9c0eb46..e179b3693c 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py @@ -365,12 +365,16 @@ def test_aux_not_in_phenom_coords(self): def test_aux_fails_typeident(self): # We provide a non-dimension coord variable, identified as a - # CFAuxiliaryCoordinate by cf.py, but we also give it the wrong units, - # which causes it to fail both 'is_time' and 'is_period' tests, so the - # 'provides_coord' rule fails to trigger. - # As in 'test_dim_fails_typeident', the coord is built as a - # 'miscellaneous' rather than a specific coord type (recognised from its - # name), but this makes absolutely no practical difference. + # CFAuxiliaryCoordinate by cf.py, but we also give it "wrong" units, + # unsuitable for a time or period coord. 
+ # Because it fails both 'is_time' and 'is_period' tests, it then does + # not trigger 'fc_build_auxiliary_coordinate_time[[_period]]'. + # As in the above testcase 'test_dim_fails_typeident', the routine + # 'action_build_auxiliary_coordinate' therefore builds this as a + # 'miscellaneous' rather than a specific coord type (time or period). + # However, also as in that other case, this makes absolutely no + # practical difference -- unlike for latitude or longitutude coords, + # where it may affect the standard-name. # # Rules Triggered: # 001 : fc_default From 16be67b274f37f3612bf65474d5edf1336e0f8f3 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Thu, 1 Jul 2021 12:42:43 +0100 Subject: [PATCH 53/53] Review changes: clarify comments. --- .../actions/test__grid_mappings.py | 5 ++++- .../nc_load_rules/actions/test__time_coords.py | 17 ++++++++++------- 2 files changed, 14 insertions(+), 8 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py index 7e651998ec..a2ecdf1490 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__grid_mappings.py @@ -607,6 +607,10 @@ def test_mapping_undefined(self): # TODO: it's not clear why this needs to behave differently from case # (3.) : possibly, these two should be made consistent. # + # TODO: *all* these 'mismatch' cases should probably generate warnings, + # except for plain-latlon coords with no grid-mapping. + # At present, we _only_ warn when an expected grid-mapping is absent. + # def test_mapping__mismatch__latlon_coords_rotated_system(self): # Rules Triggered: @@ -694,7 +698,6 @@ def test_mapping__mismatch__rotated_coords_nonll_system(self): # 006 : fc_build_coordinate_(rotated_longitude)(rotated no-cs : discarded projected cs) # Notes: # * coords built : rotated-lat + lon, with no coord-system (see above) - # TODO: should this change ?? result = self.run_testcase( mapping_type_name=hh.CF_GRID_MAPPING_ALBERS, xco_name="grid_longitude", diff --git a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py index e179b3693c..47760aadcb 100644 --- a/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py +++ b/lib/iris/tests/unit/fileformats/nc_load_rules/actions/test__time_coords.py @@ -317,14 +317,17 @@ def test_dim_nonmonotonic(self): self.check_result(result, "aux") def test_dim_fails_typeident(self): - # The coord variable is identified as a CFDimensionCoordinate by cf.py, - # but having the wrong units causes it to fail the 'is_time' or - # 'is_period' test, so the 'provides_coord' rule fails to trigger. - # So it is built as a 'miscellaneous' dim-coord. + # Provide a coord variable, identified as a CFDimensionCoordinate by + # cf.py, but with the "wrong" units for a time or period coord. + # This causes it to fail both 'is_time' and 'is_period' tests and so, + # within the 'action_provides_coordinate' routine, does not trigger as + # a 'provides_coord_(time[[_period]])' rule, but instead as a + # 'default_coordinate_(provide-phase)'. + # As a result, it is built as a 'miscellaneous' dim-coord. # N.B. this makes *no* practical difference, because a 'misc' dim - # coord is still a dim coord (albeit one with bad units). 
- # ( N.B.#2 Not quite the same for lat/lon coords, where coord-specific - # 'build' rules always use a fixed standard-name ). + # coord is still a dim coord (albeit one with incorrect units). + # N.B.#2 that is different from lat/lon coords, where the coord-specific + # 'build' rules have the extra effect of setting a fixed standard-name. # # Rules Triggered: # 001 : fc_default