From 216610cfb9dc4b79757547fa28db735341d930ce Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 19 May 2021 19:35:49 +0100 Subject: [PATCH 01/35] First steps in parallel rules implementation. --- .../fileformats/_nc_load_rules/__init__.py | 1 + lib/iris/fileformats/_nc_load_rules/engine.py | 123 ++ .../fileformats/_nc_load_rules/helpers.py | 1303 +++++++++++++++++ lib/iris/fileformats/_nc_load_rules/rules.py | 304 ++++ lib/iris/fileformats/netcdf.py | 30 +- lib/iris/tests/test_netcdf.py | 17 + 6 files changed, 1776 insertions(+), 2 deletions(-) create mode 100644 lib/iris/fileformats/_nc_load_rules/__init__.py create mode 100644 lib/iris/fileformats/_nc_load_rules/engine.py create mode 100644 lib/iris/fileformats/_nc_load_rules/helpers.py create mode 100644 lib/iris/fileformats/_nc_load_rules/rules.py diff --git a/lib/iris/fileformats/_nc_load_rules/__init__.py b/lib/iris/fileformats/_nc_load_rules/__init__.py new file mode 100644 index 0000000000..cfbff5bc7c --- /dev/null +++ b/lib/iris/fileformats/_nc_load_rules/__init__.py @@ -0,0 +1 @@ +# Support for replacing Pyke rules. diff --git a/lib/iris/fileformats/_nc_load_rules/engine.py b/lib/iris/fileformats/_nc_load_rules/engine.py new file mode 100644 index 0000000000..780858df81 --- /dev/null +++ b/lib/iris/fileformats/_nc_load_rules/engine.py @@ -0,0 +1,123 @@ +""" +A simple mimic of the Pyke 'knwoledge_engine', for interfacing to the routines +in 'iris.fileformats.netcdf' with minimal changes to that code. + +The core of this is the 'Engine' class, which mimics the Pyke engine operations, +as used by our code to translate each data cube. + +engine.get_kb() also returns a FactEntity object, which mimics *just enough* +API of a Pyke.knowlege_base, so that we can list its case-specific facts, as +used in :meth:`iris.fileformats.netcdf.pyke_stats`. + +""" +from .rules import run_rules + + +class FactList: + def __init__(self): + self.case_specific_facts = [] + + +class FactEntity: + # To support: + """ + kb_facts = engine.get_kb(_PYKE_FACT_BASE) + + for key in kb_facts.entity_lists.keys(): + for arg in kb_facts.entity_lists[key].case_specific_facts: + print("\t%s%s" % (key, arg)) + + """ + + def __init__(self): + self.entity_lists = {} + + def add_fact(self, fact_name, args): + if fact_name not in self.entity_lists: + self.entity_lists[fact_name] = FactList() + fact_list = self.entity_lists[fact_name] + fact_list.case_specific_facts.append(tuple(args)) + + def sect_facts(self, entity_name): + if entity_name in self.entity_lists: + facts = self.entity_lists.get(entity_name).case_specific_facts + else: + facts = [] + return facts + + +class Engine: + """ + A minimal mimic of a Pyke.engine. + + Provides just enough API so that the existing code in + :mod:`iris.fileformats.netcdf` can interface with our new rules functions. + + """ + + def __init__(self): + """Init new engine.""" + self.reset() + + def reset(self): + """Reset the engine = remove all facts.""" + self.facts = FactEntity() + + def activate(self, rules_base_str=None): + """ + Run all the translation rules to produce a single output cube. + + This implicitly references the output variable for this operation, + set by engine.cf_var (the variable name). + + The rules operation itself is coded elsewhere, + in :mod:`iris.fileformats.netcdf._nc_load_rules.rules`. 
+ + """ + run_rules(self) + + def print_stats(self): + """No-op, called by :meth:`iris.fileformats.netcdf.pyke_stats`.""" + pass + + def add_case_specific_fact(self, kb_name, fact_name, fact_arglist): + """ + Record a fact about the current output operation. + + Roughly, self.facts.entity_lists[fact_name].append(fact_arglist). + + """ + self.facts.add_fact(fact_name, fact_arglist) + + def get_kb(self, fact_base_str=None): + """ + Get a FactEntity, which mimic (bits of) a knowledge-base. + + Just allowing + :meth:`iris.fileformats.netcdf.pyke_stats` to list the facts. + + """ + return self.facts + + def fact_list(self, fact_name): + """ + Return the facts (arg-lists) for one fact name. + + A shorthand form used only by the new rules routines. + + AKA 'case-specific-facts', in the original. + Roughly "return self.facts.entity_lists[fact_name]". + + """ + return self.facts.sect_facts(fact_name) + + def add_fact(self, fact_name, fact_arglist): + """ + Add a new fact. + + A shorthand form used only by the new rules routines. + + """ + self.add_case_specific_fact( + kb_name="", fact_name=fact_name, fact_arglist=fact_arglist + ) diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py new file mode 100644 index 0000000000..9a908a95a1 --- /dev/null +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -0,0 +1,1303 @@ +""" +All the pure-Python 'helper' functions which previously included in the Pyke +rules database. + +Initially these haven't changed. +The new rules approach is still calling most of them. + +""" + +import warnings + +import cf_units +import numpy as np +import numpy.ma as ma + +import iris.aux_factory +from iris.common.mixin import _get_valid_standard_name +import iris.coords +import iris.coord_systems +import iris.fileformats.cf as cf +import iris.fileformats.netcdf +from iris.fileformats.netcdf import ( + _get_cf_var_data, + parse_cell_methods, + UnknownCellMethodWarning, +) +import iris.exceptions +import iris.std_names +import iris.util + + +# +# UD Units Constants (based on Unidata udunits.dat definition file) +# +UD_UNITS_LAT = [ + "degrees_north", + "degree_north", + "degree_n", + "degrees_n", + "degreen", + "degreesn", + "degrees", + "degrees north", + "degree north", + "degree n", + "degrees n", +] +UD_UNITS_LON = [ + "degrees_east", + "degree_east", + "degree_e", + "degrees_e", + "degreee", + "degreese", + "degrees", + "degrees east", + "degree east", + "degree e", + "degrees e", +] +UNKNOWN_UNIT_STRING = "?" 
+NO_UNIT_STRING = "-" + +# +# CF Dimensionless Vertical Coordinates +# +CF_COORD_VERTICAL = { + "atmosphere_ln_pressure_coordinate": ["p0", "lev"], + "atmosphere_sigma_coordinate": ["sigma", "ps", "ptop"], + "atmosphere_hybrid_sigma_pressure_coordinate": ["a", "b", "ps", "p0"], + "atmosphere_hybrid_height_coordinate": ["a", "b", "orog"], + "atmosphere_sleve_coordinate": [ + "a", + "b1", + "b2", + "ztop", + "zsurf1", + "zsurf2", + ], + "ocean_sigma_coordinate": ["sigma", "eta", "depth"], + "ocean_s_coordinate": ["s", "eta", "depth", "a", "b", "depth_c"], + "ocean_sigma_z_coordinate": [ + "sigma", + "eta", + "depth", + "depth_c", + "nsigma", + "zlev", + ], + "ocean_double_sigma_coordinate": [ + "sigma", + "depth", + "z1", + "z2", + "a", + "href", + "k_c", + ], + "ocean_s_coordinate_g1": ["s", "eta", "depth", "depth_c", "C"], + "ocean_s_coordinate_g2": ["s", "eta", "depth", "depth_c", "C"], +} + +# +# CF Grid Mappings +# +CF_GRID_MAPPING_ALBERS = "albers_conical_equal_area" +CF_GRID_MAPPING_AZIMUTHAL = "azimuthal_equidistant" +CF_GRID_MAPPING_LAMBERT_AZIMUTHAL = "lambert_azimuthal_equal_area" +CF_GRID_MAPPING_LAMBERT_CONFORMAL = "lambert_conformal_conic" +CF_GRID_MAPPING_LAMBERT_CYLINDRICAL = "lambert_cylindrical_equal_area" +CF_GRID_MAPPING_LAT_LON = "latitude_longitude" +CF_GRID_MAPPING_MERCATOR = "mercator" +CF_GRID_MAPPING_ORTHO = "orthographic" +CF_GRID_MAPPING_POLAR = "polar_stereographic" +CF_GRID_MAPPING_ROTATED_LAT_LON = "rotated_latitude_longitude" +CF_GRID_MAPPING_STEREO = "stereographic" +CF_GRID_MAPPING_TRANSVERSE = "transverse_mercator" +CF_GRID_MAPPING_VERTICAL = "vertical_perspective" +CF_GRID_MAPPING_GEOSTATIONARY = "geostationary" + +# +# CF Attribute Names. +# +CF_ATTR_AXIS = "axis" +CF_ATTR_BOUNDS = "bounds" +CF_ATTR_CALENDAR = "calendar" +CF_ATTR_CLIMATOLOGY = "climatology" +CF_ATTR_GRID_INVERSE_FLATTENING = "inverse_flattening" +CF_ATTR_GRID_EARTH_RADIUS = "earth_radius" +CF_ATTR_GRID_MAPPING_NAME = "grid_mapping_name" +CF_ATTR_GRID_NORTH_POLE_LAT = "grid_north_pole_latitude" +CF_ATTR_GRID_NORTH_POLE_LON = "grid_north_pole_longitude" +CF_ATTR_GRID_NORTH_POLE_GRID_LON = "north_pole_grid_longitude" +CF_ATTR_GRID_SEMI_MAJOR_AXIS = "semi_major_axis" +CF_ATTR_GRID_SEMI_MINOR_AXIS = "semi_minor_axis" +CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN = "latitude_of_projection_origin" +CF_ATTR_GRID_LON_OF_PROJ_ORIGIN = "longitude_of_projection_origin" +CF_ATTR_GRID_STANDARD_PARALLEL = "standard_parallel" +CF_ATTR_GRID_FALSE_EASTING = "false_easting" +CF_ATTR_GRID_FALSE_NORTHING = "false_northing" +CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN = "scale_factor_at_projection_origin" +CF_ATTR_GRID_SCALE_FACTOR_AT_CENT_MERIDIAN = "scale_factor_at_central_meridian" +CF_ATTR_GRID_LON_OF_CENT_MERIDIAN = "longitude_of_central_meridian" +CF_ATTR_GRID_STANDARD_PARALLEL = "standard_parallel" +CF_ATTR_GRID_PERSPECTIVE_HEIGHT = "perspective_point_height" +CF_ATTR_GRID_SWEEP_ANGLE_AXIS = "sweep_angle_axis" +CF_ATTR_POSITIVE = "positive" +CF_ATTR_STD_NAME = "standard_name" +CF_ATTR_LONG_NAME = "long_name" +CF_ATTR_UNITS = "units" +CF_ATTR_CELL_METHODS = "cell_methods" + +# +# CF Attribute Value Constants. +# +# Attribute - axis. +CF_VALUE_AXIS_X = "x" +CF_VALUE_AXIS_Y = "y" +CF_VALUE_AXIS_T = "t" +CF_VALUE_AXIS_Z = "z" + + +# Attribute - positive. +CF_VALUE_POSITIVE = ["down", "up"] + +# Attribute - standard_name. 
+CF_VALUE_STD_NAME_LAT = "latitude" +CF_VALUE_STD_NAME_LON = "longitude" +CF_VALUE_STD_NAME_GRID_LAT = "grid_latitude" +CF_VALUE_STD_NAME_GRID_LON = "grid_longitude" +CF_VALUE_STD_NAME_PROJ_X = "projection_x_coordinate" +CF_VALUE_STD_NAME_PROJ_Y = "projection_y_coordinate" + + +################################################################################ +def build_cube_metadata(engine): + """Add the standard meta data to the cube.""" + + cf_var = engine.cf_var + cube = engine.cube + + # Determine the cube's name attributes + cube.var_name = cf_var.cf_name + standard_name = getattr(cf_var, CF_ATTR_STD_NAME, None) + long_name = getattr(cf_var, CF_ATTR_LONG_NAME, None) + cube.long_name = long_name + + if standard_name is not None: + try: + cube.standard_name = _get_valid_standard_name(standard_name) + except ValueError: + if cube.long_name is not None: + cube.attributes["invalid_standard_name"] = standard_name + else: + cube.long_name = standard_name + + # Determine the cube units. + attr_units = get_attr_units(cf_var, cube.attributes) + cube.units = attr_units + + # Incorporate cell methods + nc_att_cell_methods = getattr(cf_var, CF_ATTR_CELL_METHODS, None) + with warnings.catch_warnings(record=True) as warning_records: + cube.cell_methods = parse_cell_methods(nc_att_cell_methods) + # Filter to get the warning we are interested in. + warning_records = [ + record + for record in warning_records + if issubclass(record.category, UnknownCellMethodWarning) + ] + if len(warning_records) > 0: + # Output an enhanced warning message. + warn_record = warning_records[0] + name = "{}".format(cf_var.cf_name) + msg = warn_record.message.args[0] + msg = msg.replace("variable", "variable {!r}".format(name)) + warnings.warn(message=msg, category=UnknownCellMethodWarning) + + # Set the cube global attributes. + for attr_name, attr_value in cf_var.cf_group.global_attributes.items(): + try: + cube.attributes[str(attr_name)] = attr_value + except ValueError as e: + msg = "Skipping global attribute {!r}: {}" + warnings.warn(msg.format(attr_name, str(e))) + + +################################################################################ +def _get_ellipsoid(cf_grid_var): + """Return the ellipsoid definition.""" + major = getattr(cf_grid_var, CF_ATTR_GRID_SEMI_MAJOR_AXIS, None) + minor = getattr(cf_grid_var, CF_ATTR_GRID_SEMI_MINOR_AXIS, None) + inverse_flattening = getattr( + cf_grid_var, CF_ATTR_GRID_INVERSE_FLATTENING, None + ) + + # Avoid over-specification exception. + if major is not None and minor is not None: + inverse_flattening = None + + # Check for a default spherical earth. 
+ if major is None and minor is None and inverse_flattening is None: + major = getattr(cf_grid_var, CF_ATTR_GRID_EARTH_RADIUS, None) + + return major, minor, inverse_flattening + + +################################################################################ +def build_coordinate_system(cf_grid_var): + """Create a coordinate system from the CF-netCDF grid mapping variable.""" + major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + + return iris.coord_systems.GeogCS(major, minor, inverse_flattening) + + +################################################################################ +def build_rotated_coordinate_system(engine, cf_grid_var): + """Create a rotated coordinate system from the CF-netCDF grid mapping variable.""" + major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + + north_pole_latitude = getattr( + cf_grid_var, CF_ATTR_GRID_NORTH_POLE_LAT, 90.0 + ) + north_pole_longitude = getattr( + cf_grid_var, CF_ATTR_GRID_NORTH_POLE_LON, 0.0 + ) + if north_pole_latitude is None or north_pole_longitude is None: + warnings.warn("Rotated pole position is not fully specified") + + north_pole_grid_lon = getattr( + cf_grid_var, CF_ATTR_GRID_NORTH_POLE_GRID_LON, 0.0 + ) + + ellipsoid = None + if ( + major is not None + or minor is not None + or inverse_flattening is not None + ): + ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) + + rcs = iris.coord_systems.RotatedGeogCS( + north_pole_latitude, + north_pole_longitude, + north_pole_grid_lon, + ellipsoid, + ) + + return rcs + + +################################################################################ +def build_transverse_mercator_coordinate_system(engine, cf_grid_var): + """ + Create a transverse Mercator coordinate system from the CF-netCDF + grid mapping variable. + + """ + major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + + latitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None + ) + longitude_of_central_meridian = getattr( + cf_grid_var, CF_ATTR_GRID_LON_OF_CENT_MERIDIAN, None + ) + false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) + false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) + scale_factor_at_central_meridian = getattr( + cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_CENT_MERIDIAN, None + ) + + # The following accounts for the inconsistancy in the transverse + # mercator description within the CF spec. + if longitude_of_central_meridian is None: + longitude_of_central_meridian = getattr( + cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None + ) + if scale_factor_at_central_meridian is None: + scale_factor_at_central_meridian = getattr( + cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None + ) + + ellipsoid = None + if ( + major is not None + or minor is not None + or inverse_flattening is not None + ): + ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) + + cs = iris.coord_systems.TransverseMercator( + latitude_of_projection_origin, + longitude_of_central_meridian, + false_easting, + false_northing, + scale_factor_at_central_meridian, + ellipsoid, + ) + + return cs + + +################################################################################ +def build_lambert_conformal_coordinate_system(engine, cf_grid_var): + """ + Create a Lambert conformal conic coordinate system from the CF-netCDF + grid mapping variable. 
+ + """ + major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + + latitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None + ) + longitude_of_central_meridian = getattr( + cf_grid_var, CF_ATTR_GRID_LON_OF_CENT_MERIDIAN, None + ) + false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) + false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) + standard_parallel = getattr( + cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None + ) + + ellipsoid = None + if ( + major is not None + or minor is not None + or inverse_flattening is not None + ): + ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) + + cs = iris.coord_systems.LambertConformal( + latitude_of_projection_origin, + longitude_of_central_meridian, + false_easting, + false_northing, + standard_parallel, + ellipsoid, + ) + + return cs + + +################################################################################ +def build_stereographic_coordinate_system(engine, cf_grid_var): + """ + Create a stereographic coordinate system from the CF-netCDF + grid mapping variable. + + """ + major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + + latitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None + ) + longitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None + ) + false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) + false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) + # Iris currently only supports Stereographic projections with a scale + # factor of 1.0. This is checked elsewhere. + + ellipsoid = None + if ( + major is not None + or minor is not None + or inverse_flattening is not None + ): + ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) + + cs = iris.coord_systems.Stereographic( + latitude_of_projection_origin, + longitude_of_projection_origin, + false_easting, + false_northing, + true_scale_lat=None, + ellipsoid=ellipsoid, + ) + + return cs + + +################################################################################ +def build_mercator_coordinate_system(engine, cf_grid_var): + """ + Create a Mercator coordinate system from the CF-netCDF + grid mapping variable. + + """ + major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + + longitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None + ) + # Iris currently only supports Mercator projections with specific + # values for false_easting, false_northing, + # scale_factor_at_projection_origin and standard_parallel. These are + # checked elsewhere. + + ellipsoid = None + if ( + major is not None + or minor is not None + or inverse_flattening is not None + ): + ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) + + cs = iris.coord_systems.Mercator( + longitude_of_projection_origin, ellipsoid=ellipsoid + ) + + return cs + + +################################################################################ +def build_lambert_azimuthal_equal_area_coordinate_system(engine, cf_grid_var): + """ + Create a lambert azimuthal equal area coordinate system from the CF-netCDF + grid mapping variable. 
+ + """ + major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + + latitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None + ) + longitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None + ) + false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) + false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) + + ellipsoid = None + if ( + major is not None + or minor is not None + or inverse_flattening is not None + ): + ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) + + cs = iris.coord_systems.LambertAzimuthalEqualArea( + latitude_of_projection_origin, + longitude_of_projection_origin, + false_easting, + false_northing, + ellipsoid, + ) + + return cs + + +################################################################################ +def build_albers_equal_area_coordinate_system(engine, cf_grid_var): + """ + Create a albers conical equal area coordinate system from the CF-netCDF + grid mapping variable. + + """ + major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + + latitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None + ) + longitude_of_central_meridian = getattr( + cf_grid_var, CF_ATTR_GRID_LON_OF_CENT_MERIDIAN, None + ) + false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) + false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) + standard_parallels = getattr( + cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None + ) + + ellipsoid = None + if ( + major is not None + or minor is not None + or inverse_flattening is not None + ): + ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) + + cs = iris.coord_systems.AlbersEqualArea( + latitude_of_projection_origin, + longitude_of_central_meridian, + false_easting, + false_northing, + standard_parallels, + ellipsoid, + ) + + return cs + + +################################################################################ +def build_vertical_perspective_coordinate_system(engine, cf_grid_var): + """ + Create a vertical perspective coordinate system from the CF-netCDF + grid mapping variable. + + """ + major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + + latitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None + ) + longitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None + ) + perspective_point_height = getattr( + cf_grid_var, CF_ATTR_GRID_PERSPECTIVE_HEIGHT, None + ) + false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) + false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) + + ellipsoid = None + if ( + major is not None + or minor is not None + or inverse_flattening is not None + ): + ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) + + cs = iris.coord_systems.VerticalPerspective( + latitude_of_projection_origin, + longitude_of_projection_origin, + perspective_point_height, + false_easting, + false_northing, + ellipsoid, + ) + + return cs + + +################################################################################ +def build_geostationary_coordinate_system(engine, cf_grid_var): + """ + Create a geostationary coordinate system from the CF-netCDF + grid mapping variable. 
+ + """ + major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) + + latitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN, None + ) + longitude_of_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_LON_OF_PROJ_ORIGIN, None + ) + perspective_point_height = getattr( + cf_grid_var, CF_ATTR_GRID_PERSPECTIVE_HEIGHT, None + ) + false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) + false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) + sweep_angle_axis = getattr( + cf_grid_var, CF_ATTR_GRID_SWEEP_ANGLE_AXIS, None + ) + + ellipsoid = None + if ( + major is not None + or minor is not None + or inverse_flattening is not None + ): + ellipsoid = iris.coord_systems.GeogCS(major, minor, inverse_flattening) + + cs = iris.coord_systems.Geostationary( + latitude_of_projection_origin, + longitude_of_projection_origin, + perspective_point_height, + sweep_angle_axis, + false_easting, + false_northing, + ellipsoid, + ) + + return cs + + +################################################################################ +def get_attr_units(cf_var, attributes): + attr_units = getattr(cf_var, CF_ATTR_UNITS, UNKNOWN_UNIT_STRING) + if not attr_units: + attr_units = UNKNOWN_UNIT_STRING + + # Sanitise lat/lon units. + if attr_units in UD_UNITS_LAT or attr_units in UD_UNITS_LON: + attr_units = "degrees" + + # Graceful loading of invalid units. + try: + cf_units.as_unit(attr_units) + except ValueError: + # Using converted unicode message. Can be reverted with Python 3. + msg = "Ignoring netCDF variable {!r} invalid units {!r}".format( + cf_var.cf_name, attr_units + ) + warnings.warn(msg) + attributes["invalid_units"] = attr_units + attr_units = UNKNOWN_UNIT_STRING + + if np.issubdtype(cf_var.dtype, np.str_): + attr_units = NO_UNIT_STRING + + if any( + hasattr(cf_var.cf_data, name) + for name in ("flag_values", "flag_masks", "flag_meanings") + ): + attr_units = cf_units._NO_UNIT_STRING + + # Get any assoicated calendar for a time reference coordinate. + if cf_units.as_unit(attr_units).is_time_reference(): + attr_calendar = getattr(cf_var, CF_ATTR_CALENDAR, None) + + if attr_calendar: + attr_units = cf_units.Unit(attr_units, calendar=attr_calendar) + + return attr_units + + +################################################################################ +def get_names(cf_coord_var, coord_name, attributes): + """Determine the standard_name, long_name and var_name attributes.""" + + standard_name = getattr(cf_coord_var, CF_ATTR_STD_NAME, None) + long_name = getattr(cf_coord_var, CF_ATTR_LONG_NAME, None) + cf_name = str(cf_coord_var.cf_name) + + if standard_name is not None: + try: + standard_name = _get_valid_standard_name(standard_name) + except ValueError: + if long_name is not None: + attributes["invalid_standard_name"] = standard_name + if coord_name is not None: + standard_name = coord_name + else: + standard_name = None + else: + if coord_name is not None: + attributes["invalid_standard_name"] = standard_name + standard_name = coord_name + else: + standard_name = None + + else: + if coord_name is not None: + standard_name = coord_name + + # Last attempt to set the standard name to something meaningful. 
+ if standard_name is None: + if cf_name in iris.std_names.STD_NAMES: + standard_name = cf_name + + return (standard_name, long_name, cf_name) + + +################################################################################ +def get_cf_bounds_var(cf_coord_var): + """ + Return the CF variable representing the bounds of a coordinate + variable. + + """ + attr_bounds = getattr(cf_coord_var, CF_ATTR_BOUNDS, None) + attr_climatology = getattr(cf_coord_var, CF_ATTR_CLIMATOLOGY, None) + + # Determine bounds, prefering standard bounds over climatology. + # NB. No need to raise a warning if the bounds/climatology + # variable is missing, as that will already have been done by + # iris.fileformats.cf. + cf_bounds_var = None + climatological = False + if attr_bounds is not None: + bounds_vars = cf_coord_var.cf_group.bounds + if attr_bounds in bounds_vars: + cf_bounds_var = bounds_vars[attr_bounds] + elif attr_climatology is not None: + climatology_vars = cf_coord_var.cf_group.climatology + if attr_climatology in climatology_vars: + cf_bounds_var = climatology_vars[attr_climatology] + climatological = True + + if attr_bounds is not None and attr_climatology is not None: + warnings.warn( + "Ignoring climatology in favour of bounds attribute " + "on NetCDF variable {!r}.".format(cf_coord_var.cf_name) + ) + + return cf_bounds_var, climatological + + +################################################################################ +def reorder_bounds_data(bounds_data, cf_bounds_var, cf_coord_var): + """ + Return a bounds_data array with the vertex dimension as the most + rapidly varying. + + .. note:: + + This function assumes the dimension names of the coordinate + variable match those of the bounds variable in order to determine + which is the vertex dimension. + + + """ + vertex_dim_names = set(cf_bounds_var.dimensions).difference( + cf_coord_var.dimensions + ) + if len(vertex_dim_names) != 1: + msg = ( + "Too many dimension names differ between coordinate " + "variable {!r} and the bounds variable {!r}. " + "Expected 1, got {}." + ) + raise ValueError( + msg.format( + str(cf_coord_var.cf_name), + str(cf_bounds_var.cf_name), + len(vertex_dim_names), + ) + ) + vertex_dim = cf_bounds_var.dimensions.index(*vertex_dim_names) + bounds_data = np.rollaxis( + bounds_data.view(), vertex_dim, len(bounds_data.shape) + ) + return bounds_data + + +################################################################################ +def build_dimension_coordinate( + engine, cf_coord_var, coord_name=None, coord_system=None +): + """Create a dimension coordinate (DimCoord) and add it to the cube.""" + + cf_var = engine.cf_var + cube = engine.cube + attributes = {} + + attr_units = get_attr_units(cf_coord_var, attributes) + points_data = cf_coord_var[:] + # Gracefully fill points masked array. + if ma.is_masked(points_data): + points_data = ma.filled(points_data) + msg = "Gracefully filling {!r} dimension coordinate masked points" + warnings.warn(msg.format(str(cf_coord_var.cf_name))) + + # Get any coordinate bounds. + cf_bounds_var, climatological = get_cf_bounds_var(cf_coord_var) + if cf_bounds_var is not None: + bounds_data = cf_bounds_var[:] + # Gracefully fill bounds masked array. + if ma.is_masked(bounds_data): + bounds_data = ma.filled(bounds_data) + msg = "Gracefully filling {!r} dimension coordinate masked bounds" + warnings.warn(msg.format(str(cf_coord_var.cf_name))) + # Handle transposed bounds where the vertex dimension is not + # the last one. Test based on shape to support different + # dimension names. 
+ if cf_bounds_var.shape[:-1] != cf_coord_var.shape: + bounds_data = reorder_bounds_data( + bounds_data, cf_bounds_var, cf_coord_var + ) + else: + bounds_data = None + + # Determine whether the coordinate is circular. + circular = False + if ( + points_data.ndim == 1 + and coord_name in [CF_VALUE_STD_NAME_LON, CF_VALUE_STD_NAME_GRID_LON] + and cf_units.Unit(attr_units) + in [cf_units.Unit("radians"), cf_units.Unit("degrees")] + ): + modulus_value = cf_units.Unit(attr_units).modulus + circular = iris.util._is_circular( + points_data, modulus_value, bounds=bounds_data + ) + + # Determine the name of the dimension/s shared between the CF-netCDF data variable + # and the coordinate being built. + common_dims = [ + dim for dim in cf_coord_var.dimensions if dim in cf_var.dimensions + ] + data_dims = None + if common_dims: + # Calculate the offset of each common dimension. + data_dims = [cf_var.dimensions.index(dim) for dim in common_dims] + + # Determine the standard_name, long_name and var_name + standard_name, long_name, var_name = get_names( + cf_coord_var, coord_name, attributes + ) + + # Create the coordinate. + try: + coord = iris.coords.DimCoord( + points_data, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + units=attr_units, + bounds=bounds_data, + attributes=attributes, + coord_system=coord_system, + circular=circular, + climatological=climatological, + ) + except ValueError as e_msg: + # Attempt graceful loading. + coord = iris.coords.AuxCoord( + points_data, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + units=attr_units, + bounds=bounds_data, + attributes=attributes, + coord_system=coord_system, + climatological=climatological, + ) + cube.add_aux_coord(coord, data_dims) + msg = ( + "Failed to create {name!r} dimension coordinate: {error}\n" + "Gracefully creating {name!r} auxiliary coordinate instead." + ) + warnings.warn(msg.format(name=str(cf_coord_var.cf_name), error=e_msg)) + else: + # Add the dimension coordinate to the cube. + if data_dims: + cube.add_dim_coord(coord, data_dims) + else: + # Scalar coords are placed in the aux_coords container. + cube.add_aux_coord(coord, data_dims) + + # Update the coordinate to CF-netCDF variable mapping. + engine.cube_parts["coordinates"].append((coord, cf_coord_var.cf_name)) + + +################################################################################ +def build_auxiliary_coordinate( + engine, cf_coord_var, coord_name=None, coord_system=None +): + """Create an auxiliary coordinate (AuxCoord) and add it to the cube.""" + + cf_var = engine.cf_var + cube = engine.cube + attributes = {} + + # Get units + attr_units = get_attr_units(cf_coord_var, attributes) + + # Get any coordinate point data. + if isinstance(cf_coord_var, cf.CFLabelVariable): + points_data = cf_coord_var.cf_label_data(cf_var) + else: + points_data = _get_cf_var_data(cf_coord_var, engine.filename) + + # Get any coordinate bounds. + cf_bounds_var, climatological = get_cf_bounds_var(cf_coord_var) + if cf_bounds_var is not None: + bounds_data = _get_cf_var_data(cf_bounds_var, engine.filename) + + # Handle transposed bounds where the vertex dimension is not + # the last one. Test based on shape to support different + # dimension names. + if cf_bounds_var.shape[:-1] != cf_coord_var.shape: + # Resolving the data to a numpy array (i.e. *not* masked) for + # compatibility with array creators (i.e. 
dask) + bounds_data = np.asarray(bounds_data) + bounds_data = reorder_bounds_data( + bounds_data, cf_bounds_var, cf_coord_var + ) + else: + bounds_data = None + + # Determine the name of the dimension/s shared between the CF-netCDF data variable + # and the coordinate being built. + common_dims = [ + dim for dim in cf_coord_var.dimensions if dim in cf_var.dimensions + ] + data_dims = None + if common_dims: + # Calculate the offset of each common dimension. + data_dims = [cf_var.dimensions.index(dim) for dim in common_dims] + + # Determine the standard_name, long_name and var_name + standard_name, long_name, var_name = get_names( + cf_coord_var, coord_name, attributes + ) + + # Create the coordinate + coord = iris.coords.AuxCoord( + points_data, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + units=attr_units, + bounds=bounds_data, + attributes=attributes, + coord_system=coord_system, + climatological=climatological, + ) + + # Add it to the cube + cube.add_aux_coord(coord, data_dims) + + # Make a list with names, stored on the engine, so we can find them all later. + engine.cube_parts["coordinates"].append((coord, cf_coord_var.cf_name)) + + +################################################################################ +def build_cell_measures(engine, cf_cm_var): + """Create a CellMeasure instance and add it to the cube.""" + cf_var = engine.cf_var + cube = engine.cube + attributes = {} + + # Get units + attr_units = get_attr_units(cf_cm_var, attributes) + + # Get (lazy) content array + data = _get_cf_var_data(cf_cm_var, engine.filename) + + # Determine the name of the dimension/s shared between the CF-netCDF data variable + # and the coordinate being built. + common_dims = [ + dim for dim in cf_cm_var.dimensions if dim in cf_var.dimensions + ] + data_dims = None + if common_dims: + # Calculate the offset of each common dimension. + data_dims = [cf_var.dimensions.index(dim) for dim in common_dims] + + # Determine the standard_name, long_name and var_name + standard_name, long_name, var_name = get_names(cf_cm_var, None, attributes) + + # Obtain the cf_measure. + measure = cf_cm_var.cf_measure + + # Create the CellMeasure + cell_measure = iris.coords.CellMeasure( + data, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + units=attr_units, + attributes=attributes, + measure=measure, + ) + + # Add it to the cube + cube.add_cell_measure(cell_measure, data_dims) + + # Make a list with names, stored on the engine, so we can find them all later. + engine.cube_parts["cell_measures"].append( + (cell_measure, cf_cm_var.cf_name) + ) + + +################################################################################ +def build_ancil_var(engine, cf_av_var): + """Create an AncillaryVariable instance and add it to the cube.""" + cf_var = engine.cf_var + cube = engine.cube + attributes = {} + + # Get units + attr_units = get_attr_units(cf_av_var, attributes) + + # Get (lazy) content array + data = _get_cf_var_data(cf_av_var, engine.filename) + + # Determine the name of the dimension/s shared between the CF-netCDF data variable + # and the AV being built. + common_dims = [ + dim for dim in cf_av_var.dimensions if dim in cf_var.dimensions + ] + data_dims = None + if common_dims: + # Calculate the offset of each common dimension. 
+ data_dims = [cf_var.dimensions.index(dim) for dim in common_dims] + + # Determine the standard_name, long_name and var_name + standard_name, long_name, var_name = get_names(cf_av_var, None, attributes) + + # Create the AncillaryVariable + av = iris.coords.AncillaryVariable( + data, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + units=attr_units, + attributes=attributes, + ) + + # Add it to the cube + cube.add_ancillary_variable(av, data_dims) + + # Make a list with names, stored on the engine, so we can find them all later. + engine.cube_parts["ancillary_variables"].append((av, cf_av_var.cf_name)) + + +################################################################################ +def _is_lat_lon( + cf_var, ud_units, std_name, std_name_grid, axis_name, prefixes +): + """ + Determine whether the CF coordinate variable is a latitude/longitude variable. + + Ref: [CF] Section 4.1 Latitude Coordinate. + [CF] Section 4.2 Longitude Coordinate. + + """ + is_valid = False + attr_units = getattr(cf_var, CF_ATTR_UNITS, None) + + if attr_units is not None: + attr_units = attr_units.lower() + is_valid = attr_units in ud_units + + # Special case - Check for rotated pole. + if attr_units == "degrees": + attr_std_name = getattr(cf_var, CF_ATTR_STD_NAME, None) + if attr_std_name is not None: + is_valid = attr_std_name.lower() == std_name_grid + else: + is_valid = False + # TODO: check that this interpretation of axis is correct. + attr_axis = getattr(cf_var, CF_ATTR_AXIS, None) + if attr_axis is not None: + is_valid = attr_axis.lower() == axis_name + else: + # Alternative is to check standard_name or axis. + attr_std_name = getattr(cf_var, CF_ATTR_STD_NAME, None) + + if attr_std_name is not None: + attr_std_name = attr_std_name.lower() + is_valid = attr_std_name in [std_name, std_name_grid] + if not is_valid: + is_valid = any( + [attr_std_name.startswith(prefix) for prefix in prefixes] + ) + else: + attr_axis = getattr(cf_var, CF_ATTR_AXIS, None) + + if attr_axis is not None: + is_valid = attr_axis.lower() == axis_name + + return is_valid + + +################################################################################ +def is_latitude(engine, cf_name): + """Determine whether the CF coordinate variable is a latitude variable.""" + cf_var = engine.cf_var.cf_group[cf_name] + return _is_lat_lon( + cf_var, + UD_UNITS_LAT, + CF_VALUE_STD_NAME_LAT, + CF_VALUE_STD_NAME_GRID_LAT, + CF_VALUE_AXIS_Y, + ["lat", "rlat"], + ) + + +################################################################################ +def is_longitude(engine, cf_name): + """Determine whether the CF coordinate variable is a longitude variable.""" + cf_var = engine.cf_var.cf_group[cf_name] + return _is_lat_lon( + cf_var, + UD_UNITS_LON, + CF_VALUE_STD_NAME_LON, + CF_VALUE_STD_NAME_GRID_LON, + CF_VALUE_AXIS_X, + ["lon", "rlon"], + ) + + +################################################################################ +def is_projection_x_coordinate(engine, cf_name): + """ + Determine whether the CF coordinate variable is a + projection_x_coordinate variable. + + """ + cf_var = engine.cf_var.cf_group[cf_name] + attr_name = getattr(cf_var, CF_ATTR_STD_NAME, None) or getattr( + cf_var, CF_ATTR_LONG_NAME, None + ) + return attr_name == CF_VALUE_STD_NAME_PROJ_X + + +################################################################################ +def is_projection_y_coordinate(engine, cf_name): + """ + Determine whether the CF coordinate variable is a + projection_y_coordinate variable. 
+ + """ + cf_var = engine.cf_var.cf_group[cf_name] + attr_name = getattr(cf_var, CF_ATTR_STD_NAME, None) or getattr( + cf_var, CF_ATTR_LONG_NAME, None + ) + return attr_name == CF_VALUE_STD_NAME_PROJ_Y + + +################################################################################ +def is_time(engine, cf_name): + """ + Determine whether the CF coordinate variable is a time variable. + + Ref: [CF] Section 4.4 Time Coordinate. + + """ + cf_var = engine.cf_var.cf_group[cf_name] + attr_units = getattr(cf_var, CF_ATTR_UNITS, None) + + attr_std_name = getattr(cf_var, CF_ATTR_STD_NAME, None) + attr_axis = getattr(cf_var, CF_ATTR_AXIS, "") + try: + is_time_reference = cf_units.Unit(attr_units or 1).is_time_reference() + except ValueError: + is_time_reference = False + + return is_time_reference and ( + attr_std_name == "time" or attr_axis.lower() == CF_VALUE_AXIS_T + ) + + +################################################################################ +def is_time_period(engine, cf_name): + """Determine whether the CF coordinate variable represents a time period.""" + is_valid = False + cf_var = engine.cf_var.cf_group[cf_name] + attr_units = getattr(cf_var, CF_ATTR_UNITS, None) + + if attr_units is not None: + try: + is_valid = cf_units.is_time(attr_units) + except ValueError: + is_valid = False + + return is_valid + + +################################################################################ +def is_grid_mapping(engine, cf_name, grid_mapping): + """Determine whether the CF grid mapping variable is of the appropriate type.""" + + is_valid = False + cf_var = engine.cf_var.cf_group[cf_name] + attr_mapping_name = getattr(cf_var, CF_ATTR_GRID_MAPPING_NAME, None) + + if attr_mapping_name is not None: + is_valid = attr_mapping_name.lower() == grid_mapping + + return is_valid + + +################################################################################ +def _is_rotated(engine, cf_name, cf_attr_value): + """Determine whether the CF coordinate variable is rotated.""" + + is_valid = False + cf_var = engine.cf_var.cf_group[cf_name] + attr_std_name = getattr(cf_var, CF_ATTR_STD_NAME, None) + + if attr_std_name is not None: + is_valid = attr_std_name.lower() == cf_attr_value + else: + attr_units = getattr(cf_var, CF_ATTR_UNITS, None) + if attr_units is not None: + is_valid = attr_units.lower() == "degrees" + + return is_valid + + +################################################################################ +def is_rotated_latitude(engine, cf_name): + """Determine whether the CF coodinate variable is rotated latitude.""" + return _is_rotated(engine, cf_name, CF_VALUE_STD_NAME_GRID_LAT) + + +############################################################################### +def is_rotated_longitude(engine, cf_name): + """Determine whether the CF coordinate variable is rotated longitude.""" + return _is_rotated(engine, cf_name, CF_VALUE_STD_NAME_GRID_LON) + + +################################################################################ +def has_supported_mercator_parameters(engine, cf_name): + """Determine whether the CF grid mapping variable has the supported + values for the parameters of the Mercator projection.""" + + is_valid = True + cf_grid_var = engine.cf_var.cf_group[cf_name] + + false_easting = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_EASTING, None) + false_northing = getattr(cf_grid_var, CF_ATTR_GRID_FALSE_NORTHING, None) + scale_factor_at_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None + ) + standard_parallel = getattr( + 
cf_grid_var, CF_ATTR_GRID_STANDARD_PARALLEL, None + ) + + if false_easting is not None and false_easting != 0: + warnings.warn( + "False eastings other than 0.0 not yet supported " + "for Mercator projections" + ) + is_valid = False + if false_northing is not None and false_northing != 0: + warnings.warn( + "False northings other than 0.0 not yet supported " + "for Mercator projections" + ) + is_valid = False + if ( + scale_factor_at_projection_origin is not None + and scale_factor_at_projection_origin != 1 + ): + warnings.warn( + "Scale factors other than 1.0 not yet supported for " + "Mercator projections" + ) + is_valid = False + if standard_parallel is not None and standard_parallel != 0: + warnings.warn( + "Standard parallels other than 0.0 not yet " + "supported for Mercator projections" + ) + is_valid = False + + return is_valid + + +################################################################################ +def has_supported_stereographic_parameters(engine, cf_name): + """Determine whether the CF grid mapping variable has a value of 1.0 + for the scale_factor_at_projection_origin attribute.""" + + is_valid = True + cf_grid_var = engine.cf_var.cf_group[cf_name] + + scale_factor_at_projection_origin = getattr( + cf_grid_var, CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN, None + ) + + if ( + scale_factor_at_projection_origin is not None + and scale_factor_at_projection_origin != 1 + ): + warnings.warn( + "Scale factors other than 1.0 not yet supported for " + "stereographic projections" + ) + is_valid = False + + return is_valid diff --git a/lib/iris/fileformats/_nc_load_rules/rules.py b/lib/iris/fileformats/_nc_load_rules/rules.py new file mode 100644 index 0000000000..b7a83e9fa4 --- /dev/null +++ b/lib/iris/fileformats/_nc_load_rules/rules.py @@ -0,0 +1,304 @@ +""" +Replacement code for the Pyke rules. + +For now, we are still emulating various aspects of how our original Pyke-based +code used the Pyke 'engine' to hold translation data, both Pyke-specific and +not : +1) basic details from the iris.fileformats.cf analysis of the file are + recorded before translating each output cube, using + "engine.assert_case_specific_fact(name, args)". + +2) this is also used to store intermediate info passed between rules, which + used to done with a "facts_cf.provides" statement in rule actions. + +3) Iris-specific info is stored in our own additional properties stored in + extra properties added to the engine object : + engine.cf_var, .cube, .cube_parts, .requires, .rule_triggered, .filename + +Our "rules" are just action routines. +The master 'run_rules' routine decides which to call based on the info recorded +when processing each cube output. It does this in a simple explicit way, which +doesn't use any clever chaining, "trigger conditions" or rules-like behaviour. + +FOR NOW: we are still using intermediate facts to carry information between +rules. + +""" + +from . import helpers as hh +from functools import wraps + + +def convert_rulesfuncname_to_rulename(func_name): + # Given the name of a rules-func, return the name of the rule. + funcname_prefix = "rule_" + rulename_prefix = "fc_" # To match existing behaviours + rule_name = func_name + if rule_name.startswith(funcname_prefix): + rule_name = rule_name[len(funcname_prefix) :] + if not rule_name.startswith(rulename_prefix): + rule_name = rulename_prefix + rule_name + return rule_name + + +def _default_rulenamesfunc(func_name): + # A default function to deduce the rules-name from a rule-func-name. 
+ # This (default) one assumes there are *no* additional call fact_arglist, + # i.e. the function does *not* take parameters to implement multiple rules. + rule_name = convert_rulesfuncname_to_rulename(func_name) + return rule_name + + +def rules_function(func): + # Wrap a rules function with some standard behaviour. + # Notably : engages with the rules logging process. + @wraps(func) + def inner(engine, *args, **kwargs): + # Call the original rules-func + rule_name = func(engine, *args, **kwargs) + if rule_name is None: + # Work out the corresponding rule name, and log it. + # Note: a rules returns a name string, which identifies it, + # but also may vary depending on whether it successfully + # triggered, and if so what it mathched. + rule_name = _default_rulenamesfunc(func.__name__) + engine.rule_triggered.add(rule_name) + + func._rulenames_func = _default_rulenamesfunc + return inner + + +@rules_function +def rule_default(engine): + hh.build_cube_metadata(engine) + + +_grid_types_to_checker_builder = { + hh.CF_GRID_MAPPING_LAT_LON: (None, hh.build_coordinate_system), + hh.CF_GRID_MAPPING_ROTATED_LAT_LON: ( + None, + hh.build_rotated_coordinate_system, + ), + hh.CF_GRID_MAPPING_MERCATOR: ( + hh.has_supported_mercator_parameters, + hh.build_mercator_coordinate_system, + ), + hh.CF_GRID_MAPPING_TRANSVERSE: ( + None, + hh.build_transverse_mercator_coordinate_system, + ), + hh.CF_GRID_MAPPING_STEREO: ( + hh.has_supported_stereographic_parameters, + hh.build_stereographic_coordinate_system, + ), + hh.CF_GRID_MAPPING_LAMBERT_CONFORMAL: ( + None, + hh.build_lambert_conformal_coordinate_system, + ), + hh.CF_GRID_MAPPING_LAMBERT_AZIMUTHAL: ( + None, + hh.build_lambert_azimuthal_equal_area_coordinate_system, + ), + hh.CF_GRID_MAPPING_ALBERS: ( + None, + hh.build_albers_equal_area_coordinate_system, + ), + hh.CF_GRID_MAPPING_VERTICAL: ( + None, + hh.build_vertical_perspective_coordinate_system, + ), + hh.CF_GRID_MAPPING_GEOSTATIONARY: ( + None, + hh.build_geostationary_coordinate_system, + ), +} + + +@rules_function +def rule_provides_grid_mapping(engine, gridmapping_fact): + (var_name,) = gridmapping_fact + rule_name = "fc_provides_grid_mapping" + cf_var = engine.cf_var.cf_group[var_name] + grid_mapping_type = getattr(cf_var, hh.CF_ATTR_GRID_MAPPING_NAME, None) + succeed = True + if grid_mapping_type is None: + succeed = False + rule_name += " --FAILED(no grid-mapping attr)" + else: + grid_mapping_type = grid_mapping_type.lower() + if succeed: + if grid_mapping_type in _grid_types_to_checker_builder: + checker, builder = _grid_types_to_checker_builder[ + grid_mapping_type + ] + rule_name += f"_({grid_mapping_type})" + else: + succeed = False + rule_name += f" --FAILED(unhandled type {grid_mapping_type})" + # We DON'T call this, as we already identified the type in the call. + # if succeed and not is_grid_mapping(engine, var_name, grid_mapping_type): + # succeed = False + # rule_name += f' --(FAILED is_grid_mapping)' + if succeed: + if checker is not None and not checker(engine, grid_mapping_type): + succeed = False + rule_name += f" --(FAILED check {checker.__name__})" + + if succeed: + coordinate_system = builder(engine, cf_var) + # Check there is not an existing one. 
+ old_gridtype_fact = engine.fact_list("grid-type") + if old_gridtype_fact: + (old_gridtype,) = old_gridtype_fact + succeed = False + rule_name += ( + f" --(FAILED overwrite coord-sytem " + f"{old_gridtype} with {grid_mapping_type})" + ) + if succeed: + engine.cube_parts["coordinate_system"] = coordinate_system + engine.add_fact("grid-type", (grid_mapping_type,)) + + return rule_name + + +@rules_function +def rule_provides_coordinate(engine, dimcoord_fact): + (var_name,) = dimcoord_fact + + # Identify the coord type + # N.B. *only* to "name" the rule, for debug : no functional need. + coord_type = None + if hh.is_latitude(engine, var_name): + coord_type = "latitude" + elif hh.is_longitude(engine, var_name): + coord_type = "longitude" + elif hh.is_rotated_latitude(engine, var_name): + coord_type = "rotated_latitude" + elif hh.is_rotated_longitude(engine, var_name): + coord_type = "rotated_longitude" + elif hh.is_time(engine, var_name): + coord_type = "time" + elif hh.is_time_period(engine, var_name): + coord_type = "time_period" + elif hh.is_projection_x_coordinate(engine, var_name): + coord_type = "projection_x" + elif hh.is_projection_y_coordinate(engine, var_name): + coord_type = "projection_y" + + if coord_type is None: + # Not identified as a specific known coord_type. + # N.B. in the original rules, this does *not* trigger separate + # 'provides' and 'build' phases : there is just a single + # 'fc_default_coordinate' rule. + # Rationalise this for now by making it like the others. + # FOR NOW: ~matching old code, but they could *all* be simplified. + # TODO: combine 2 operation into 1 for ALL of these. + coord_type = "miscellaneous" + rule_name = "fc_default_coordinate_(provide-phase)" + else: + rule_name = f"fc_provides_coordinate_({coord_type})" + + engine.add_fact("provides-coordinate-(oftype)", (coord_type, var_name)) + return rule_name + + +_coordtype_to_gridtype_coordname = { + "latitude": ("latitude_longitude", hh.CF_VALUE_STD_NAME_LAT), + "longitude": ("latitude_longitude", hh.CF_VALUE_STD_NAME_LON), + "rotated_latitude": ( + "rotated_latitude_longitude", + hh.CF_VALUE_STD_NAME_GRID_LAT, + ), + "rotated_longitude": ( + "rotated_latitude_longitude", + hh.CF_VALUE_STD_NAME_GRID_LON, + ), + "projection_x": ("projected", hh.CF_VALUE_STD_NAME_PROJ_X), + "projection_y": ("projected", hh.CF_VALUE_STD_NAME_PROJ_Y), + "time": (None, None), + "time_period": (None, None), + "miscellaneous": (None, None), +} + + +@rules_function +def rule_build_coordinate(engine, providescoord_fact): + coord_type, var_name = providescoord_fact + cf_var = engine.cf_var.cf_group[var_name] + rule_name = f"fc_build_coordinate_{coord_type}" + grid_type, coord_name = _coordtype_to_gridtype_coordname[coord_type] + succeed = True + coord_system = None + if grid_type is not None: + if coord_type not in ("latitude", "longitude"): + # There needs to be the right sort of coordinate system + coord_system = engine.cube_parts.get("coordinate_system") + if coord_system is None: + succeed = False + rule_name += " --FAILED(no coord-system)" + # TODO else: we ***asssume*** coord-system is the right type ?? + if succeed: + hh.build_dimension_coordinate( + engine, cf_var, coord_name=coord_name, coord_system=coord_system + ) + + return rule_name + + +@rules_function +def rule_build_auxiliary_coordinate(engine, auxcoord_fact): + (var_name,) = auxcoord_fact + rule_name = "fc_build_auxiliary_coordinate" + + # FOR NOW: attempt to identify type, though it only affects rule-name? 
+ coord_type = "" # unidentified : can be OK + if hh.is_time(engine, var_name): + coord_type = "time" + elif hh.is_time_period(engine, var_name): + coord_type = "time_period" + elif hh.is_longitude(engine, var_name): + coord_type = "longitude" + if hh.is_rotated_longitude(engine, var_name): + coord_type += "_rotated" + elif hh.is_latitude(engine, var_name): + coord_type = "latitude" + if hh.is_rotated_latitude(engine, var_name): + coord_type += "_rotated" + + if coord_type: + rule_name += f"_{coord_type}" + + cf_var = engine.cf_var.cf_group.auxiliary_coordinates[var_name] + hh.build_auxiliary_coordinate( + engine, cf_var, coord_name=hh.CF_VALUE_STD_NAME_GRID_LON + ) + + return rule_name + + +def run_rules(engine): + # default (all cubes) rule, always runs + rule_default(engine) # This should run the default rules. + + # deal with grid-mappings + grid_mapping_facts = engine.fact_list("grid_mapping") + for grid_mapping_fact in grid_mapping_facts: + rule_provides_grid_mapping(engine, grid_mapping_fact) + + # identify + record aka "PROVIDE" specific named coordinates + # N.B. cf.py id-d these as coords NOT aux-coords (stored separately) + # TODO: can probably remove this step ?? + dimcoord_facts = engine.fact_list("coordinate") + for dimcoord_fact in dimcoord_facts: + rule_provides_coordinate(engine, dimcoord_fact) + + # build coordinates + providescoord_facts = engine.fact_list("provides-coordinate-(oftype)") + for providescoord_fact in providescoord_facts: + rule_build_coordinate(engine, providescoord_fact) + + # build aux-coords + auxcoord_facts = engine.fact_list("auxiliary_coordinate") + for auxcoord_fact in auxcoord_facts: + rule_build_auxiliary_coordinate(engine, auxcoord_fact) diff --git a/lib/iris/fileformats/netcdf.py b/lib/iris/fileformats/netcdf.py index 206f7526c6..da7a1ae451 100644 --- a/lib/iris/fileformats/netcdf.py +++ b/lib/iris/fileformats/netcdf.py @@ -384,7 +384,7 @@ def coord(self, name): return result -def _pyke_kb_engine(): +def _pyke_kb_engine_real(): """Return the PyKE knowledge engine for CF->cube conversion.""" pyke_dir = os.path.join(os.path.dirname(__file__), "_pyke_rules") @@ -415,6 +415,21 @@ def _pyke_kb_engine(): return engine +LOAD_PYKE = True + + +def _pyke_kb_engine(): + """Return a knowledge engine, or replacement object.""" + if LOAD_PYKE: + engine = _pyke_kb_engine_real() + else: + # Deferred import to avoid circularity. + import iris.fileformats._nc_load_rules.engine as nonpyke_engine + + engine = nonpyke_engine.Engine() + return engine + + class NetCDFDataProxy: """A reference to the data payload of a single NetCDF file variable.""" @@ -584,6 +599,17 @@ def _get_cf_var_data(cf_var, filename): return as_lazy_data(proxy, chunks=chunks) +class OrderedAddableList(list): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._n_add = 0 + + def add(self, msg): + self._n_add += 1 + n_add = self._n_add + self.append(f"#{n_add:03d} : {msg}") + + def _load_cube(engine, cf, cf_var, filename): """Create the cube associated with the CF-netCDF data variable.""" data = _get_cf_var_data(cf_var, filename) @@ -597,7 +623,7 @@ def _load_cube(engine, cf, cf_var, filename): engine.cube = cube engine.cube_parts = {} engine.requires = {} - engine.rule_triggered = set() + engine.rule_triggered = OrderedAddableList() # set() engine.filename = filename # Assert any case-specific facts. 
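As a rough sketch of how the fact store introduced above is exercised (assuming an environment where this branch of Iris imports cleanly; the fact names and values used here are illustrative only, not taken from any real file):

# Sketch: driving the Engine fact store defined in engine.py above.
# The fact names/values below are illustrative examples only.
from iris.fileformats._nc_load_rules.engine import Engine

engine = Engine()

# Facts are recorded per output variable, as the netcdf loader does via
# engine.add_case_specific_fact(...); add_fact is the shorthand form.
engine.add_fact("grid_mapping", ("rotated_pole",))
engine.add_fact("coordinate", ("grid_latitude",))

# The action routines read facts back by name; unknown names yield [].
assert engine.fact_list("grid_mapping") == [("rotated_pole",)]
assert engine.fact_list("no-such-fact") == []

# get_kb() returns the FactEntity, which mimics just enough of a Pyke
# knowledge base for iris.fileformats.netcdf.pyke_stats to list the facts.
kb_facts = engine.get_kb()
for key in kb_facts.entity_lists:
    for arg in kb_facts.entity_lists[key].case_specific_facts:
        print("\t%s%s" % (key, arg))

# run_rules(engine) (renamed run_actions in the following commit) then
# consumes these facts via engine.fact_list(...) to build the cube; that
# step also needs engine.cf_var / engine.cube etc., so it is not run here.
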
diff --git a/lib/iris/tests/test_netcdf.py b/lib/iris/tests/test_netcdf.py index 2d1b4a53d5..efbd083964 100644 --- a/lib/iris/tests/test_netcdf.py +++ b/lib/iris/tests/test_netcdf.py @@ -41,9 +41,13 @@ @tests.skip_data class TestNetCDFLoad(tests.IrisTest): def setUp(self): + iris.fileformats.netcdf.DEBUG = True + iris.fileformats.netcdf.LOAD_PYKE = False self.tmpdir = None def tearDown(self): + iris.fileformats.netcdf.DEBUG = False + iris.fileformats.netcdf.LOAD_PYKE = True if self.tmpdir is not None: shutil.rmtree(self.tmpdir) @@ -127,11 +131,24 @@ def test_load_global_xyzt_gems_iter(self): def test_load_rotated_xy_land(self): # Test loading single xy rotated pole CF-netCDF file. + iris.fileformats.netcdf.LOAD_PYKE = True + print("Pyke version:") cube = iris.load_cube( tests.get_data_path( ("NetCDF", "rotated", "xy", "rotPole_landAreaFraction.nc") ) ) + print(cube) + iris.fileformats.netcdf.LOAD_PYKE = False + print("") + print("NON-Pyke version:") + cube = iris.load_cube( + tests.get_data_path( + ("NetCDF", "rotated", "xy", "rotPole_landAreaFraction.nc") + ) + ) + print(cube) + # Make sure the AuxCoords have lazy data. self.assertTrue(is_lazy_data(cube.coord("latitude").core_points())) self.assertCML(cube, ("netcdf", "netcdf_rotated_xy_land.cml")) From 9486b692d07f93a42bdbad37183dfe13336d6161 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Fri, 21 May 2021 11:48:28 +0100 Subject: [PATCH 02/35] Rename some things + add comments. --- .../_nc_load_rules/{rules.py => actions.py} | 80 +++++++++++-------- lib/iris/fileformats/_nc_load_rules/engine.py | 9 ++- lib/iris/tests/test_netcdf.py | 2 + 3 files changed, 55 insertions(+), 36 deletions(-) rename lib/iris/fileformats/_nc_load_rules/{rules.py => actions.py} (78%) diff --git a/lib/iris/fileformats/_nc_load_rules/rules.py b/lib/iris/fileformats/_nc_load_rules/actions.py similarity index 78% rename from lib/iris/fileformats/_nc_load_rules/rules.py rename to lib/iris/fileformats/_nc_load_rules/actions.py index b7a83e9fa4..5ce683b733 100644 --- a/lib/iris/fileformats/_nc_load_rules/rules.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -16,12 +16,15 @@ engine.cf_var, .cube, .cube_parts, .requires, .rule_triggered, .filename Our "rules" are just action routines. -The master 'run_rules' routine decides which to call based on the info recorded -when processing each cube output. It does this in a simple explicit way, which -doesn't use any clever chaining, "trigger conditions" or rules-like behaviour. +The top-level 'run_actions' routine decides which actions to call, based on the +info recorded when processing each cube output. It does this in a simple +explicit way, which doesn't use any clever chaining, "trigger conditions" or +other rule-type logic. -FOR NOW: we are still using intermediate facts to carry information between -rules. +TODO: remove the use of intermediate "facts" to carry information between +actions. This mimics older behaviour, so is still useful while we are still +comparing behaviour with the old Pyke rules (debugging). But once that is no +longer useful, this can be considerably simplified. """ @@ -29,9 +32,9 @@ from functools import wraps -def convert_rulesfuncname_to_rulename(func_name): - # Given the name of a rules-func, return the name of the rule. - funcname_prefix = "rule_" +def convert_actionname_to_rulename(func_name): + # Given the name of an action-func, return the name of the rule. 
+ funcname_prefix = "action_" rulename_prefix = "fc_" # To match existing behaviours rule_name = func_name if rule_name.startswith(funcname_prefix): @@ -42,15 +45,13 @@ def convert_rulesfuncname_to_rulename(func_name): def _default_rulenamesfunc(func_name): - # A default function to deduce the rules-name from a rule-func-name. - # This (default) one assumes there are *no* additional call fact_arglist, - # i.e. the function does *not* take parameters to implement multiple rules. - rule_name = convert_rulesfuncname_to_rulename(func_name) + # A simple default function to deduce the rules-name from an action-name. + rule_name = convert_actionname_to_rulename(func_name) return rule_name -def rules_function(func): - # Wrap a rules function with some standard behaviour. +def action_function(func): + # Wrap an action function with some standard behaviour. # Notably : engages with the rules logging process. @wraps(func) def inner(engine, *args, **kwargs): @@ -58,9 +59,9 @@ def inner(engine, *args, **kwargs): rule_name = func(engine, *args, **kwargs) if rule_name is None: # Work out the corresponding rule name, and log it. - # Note: a rules returns a name string, which identifies it, + # Note: an action returns a name string, which identifies it, # but also may vary depending on whether it successfully - # triggered, and if so what it mathched. + # triggered, and if so what it matched. rule_name = _default_rulenamesfunc(func.__name__) engine.rule_triggered.add(rule_name) @@ -68,11 +69,16 @@ def inner(engine, *args, **kwargs): return inner -@rules_function -def rule_default(engine): +@action_function +def action_default(engine): hh.build_cube_metadata(engine) +# Lookup table used by 'action_provides_grid_mapping'. +# Maps each supported CF grid-mapping-name to a pair of handling ("helper") +# routines: +# (@0) a validity-checker (or None) +# (@1) a coord-system builder function. _grid_types_to_checker_builder = { hh.CF_GRID_MAPPING_LAT_LON: (None, hh.build_coordinate_system), hh.CF_GRID_MAPPING_ROTATED_LAT_LON: ( @@ -114,8 +120,8 @@ def rule_default(engine): } -@rules_function -def rule_provides_grid_mapping(engine, gridmapping_fact): +@action_function +def action_provides_grid_mapping(engine, gridmapping_fact): (var_name,) = gridmapping_fact rule_name = "fc_provides_grid_mapping" cf_var = engine.cf_var.cf_group[var_name] @@ -162,8 +168,8 @@ def rule_provides_grid_mapping(engine, gridmapping_fact): return rule_name -@rules_function -def rule_provides_coordinate(engine, dimcoord_fact): +@action_function +def action_provides_coordinate(engine, dimcoord_fact): (var_name,) = dimcoord_fact # Identify the coord type @@ -203,6 +209,14 @@ def rule_provides_coordinate(engine, dimcoord_fact): return rule_name +# Lookup table used by 'action_build_dimension_coordinate'. +# Maps each supported coordinate-type name (a rules-internal concept) to a pair +# of information values : +# (@0) the CF grid_mapping_name (or None) +# If set, the cube should have a coord-system, which is set on the +# resulting coordinate. If None, the coord has no coord_system. 
+# (@1) an (optional) fixed standard-name for the coordinate, or None +# If None, the coordinate name is copied from the source variable _coordtype_to_gridtype_coordname = { "latitude": ("latitude_longitude", hh.CF_VALUE_STD_NAME_LAT), "longitude": ("latitude_longitude", hh.CF_VALUE_STD_NAME_LON), @@ -222,8 +236,8 @@ def rule_provides_coordinate(engine, dimcoord_fact): } -@rules_function -def rule_build_coordinate(engine, providescoord_fact): +@action_function +def action_build_dimension_coordinate(engine, providescoord_fact): coord_type, var_name = providescoord_fact cf_var = engine.cf_var.cf_group[var_name] rule_name = f"fc_build_coordinate_{coord_type}" @@ -246,8 +260,8 @@ def rule_build_coordinate(engine, providescoord_fact): return rule_name -@rules_function -def rule_build_auxiliary_coordinate(engine, auxcoord_fact): +@action_function +def action_build_auxiliary_coordinate(engine, auxcoord_fact): (var_name,) = auxcoord_fact rule_name = "fc_build_auxiliary_coordinate" @@ -277,28 +291,28 @@ def rule_build_auxiliary_coordinate(engine, auxcoord_fact): return rule_name -def run_rules(engine): - # default (all cubes) rule, always runs - rule_default(engine) # This should run the default rules. +def run_actions(engine): + # default (all cubes) action, always runs + action_default(engine) # This should run the default rules. # deal with grid-mappings grid_mapping_facts = engine.fact_list("grid_mapping") for grid_mapping_fact in grid_mapping_facts: - rule_provides_grid_mapping(engine, grid_mapping_fact) + action_provides_grid_mapping(engine, grid_mapping_fact) # identify + record aka "PROVIDE" specific named coordinates # N.B. cf.py id-d these as coords NOT aux-coords (stored separately) # TODO: can probably remove this step ?? dimcoord_facts = engine.fact_list("coordinate") for dimcoord_fact in dimcoord_facts: - rule_provides_coordinate(engine, dimcoord_fact) + action_provides_coordinate(engine, dimcoord_fact) # build coordinates providescoord_facts = engine.fact_list("provides-coordinate-(oftype)") for providescoord_fact in providescoord_facts: - rule_build_coordinate(engine, providescoord_fact) + action_build_dimension_coordinate(engine, providescoord_fact) # build aux-coords auxcoord_facts = engine.fact_list("auxiliary_coordinate") for auxcoord_fact in auxcoord_facts: - rule_build_auxiliary_coordinate(engine, auxcoord_fact) + action_build_auxiliary_coordinate(engine, auxcoord_fact) diff --git a/lib/iris/fileformats/_nc_load_rules/engine.py b/lib/iris/fileformats/_nc_load_rules/engine.py index 780858df81..7531345b88 100644 --- a/lib/iris/fileformats/_nc_load_rules/engine.py +++ b/lib/iris/fileformats/_nc_load_rules/engine.py @@ -1,7 +1,10 @@ """ -A simple mimic of the Pyke 'knwoledge_engine', for interfacing to the routines +A simple mimic of the Pyke 'knowledge_engine', for interfacing to the routines in 'iris.fileformats.netcdf' with minimal changes to that code. +This allows us to replace the Pyke rules operation with the simpler pure-Python +translation operations in :mod:`iris.fileformats._nc_load_rules.actions`. + The core of this is the 'Engine' class, which mimics the Pyke engine operations, as used by our code to translate each data cube. @@ -10,7 +13,7 @@ used in :meth:`iris.fileformats.netcdf.pyke_stats`. """ -from .rules import run_rules +from .actions import run_actions class FactList: @@ -74,7 +77,7 @@ def activate(self, rules_base_str=None): in :mod:`iris.fileformats.netcdf._nc_load_rules.rules`. 
""" - run_rules(self) + run_actions(self) def print_stats(self): """No-op, called by :meth:`iris.fileformats.netcdf.pyke_stats`.""" diff --git a/lib/iris/tests/test_netcdf.py b/lib/iris/tests/test_netcdf.py index efbd083964..9fc7fdefc1 100644 --- a/lib/iris/tests/test_netcdf.py +++ b/lib/iris/tests/test_netcdf.py @@ -628,6 +628,8 @@ def test_no_name_cube(self): class TestNetCDFSave(tests.IrisTest): def setUp(self): + iris.fileformats.netcdf.DEBUG = True + iris.fileformats.netcdf.LOAD_PYKE = False self.cubell = iris.cube.Cube( np.arange(4).reshape(2, 2), "air_temperature" ) From d1799ececfe4756aaeb17a5738bfbf6001ccf44f Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Fri, 21 May 2021 15:12:39 +0100 Subject: [PATCH 03/35] Headers, tweaks, reorg test__load_cube. --- .../fileformats/_nc_load_rules/__init__.py | 17 +- .../fileformats/_nc_load_rules/actions.py | 5 + lib/iris/fileformats/_nc_load_rules/engine.py | 5 + .../fileformats/_nc_load_rules/helpers.py | 7 +- .../fileformats/netcdf/load_cube/__init__.py | 6 + .../netcdf/{ => load_cube}/test__load_cube.py | 0 .../load_cube/test__load_cube__activate.py | 161 ++++++++++++++++++ 7 files changed, 199 insertions(+), 2 deletions(-) create mode 100644 lib/iris/tests/unit/fileformats/netcdf/load_cube/__init__.py rename lib/iris/tests/unit/fileformats/netcdf/{ => load_cube}/test__load_cube.py (100%) create mode 100644 lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py diff --git a/lib/iris/fileformats/_nc_load_rules/__init__.py b/lib/iris/fileformats/_nc_load_rules/__init__.py index cfbff5bc7c..baea3cf555 100644 --- a/lib/iris/fileformats/_nc_load_rules/__init__.py +++ b/lib/iris/fileformats/_nc_load_rules/__init__.py @@ -1 +1,16 @@ -# Support for replacing Pyke rules. +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Support for cube-specific CF-to-Iris translation operations. + +Interprets CF concepts identified by :mod:`iris.fileformats.cf` to add +components into loaded cubes. + +For now : the API which mimics :class:`pyke.knowledge_engine.engine`. +As this is aiming to replace the old Pyke-based logic rules. +TODO: simplify once the parallel operation with Pyke is no longer required. + +""" diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index 5ce683b733..d2ebefc69e 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -1,3 +1,8 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ Replacement code for the Pyke rules. diff --git a/lib/iris/fileformats/_nc_load_rules/engine.py b/lib/iris/fileformats/_nc_load_rules/engine.py index 7531345b88..60f956d4d1 100644 --- a/lib/iris/fileformats/_nc_load_rules/engine.py +++ b/lib/iris/fileformats/_nc_load_rules/engine.py @@ -1,3 +1,8 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ A simple mimic of the Pyke 'knowledge_engine', for interfacing to the routines in 'iris.fileformats.netcdf' with minimal changes to that code. 
diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py index 9a908a95a1..0ac1cb7472 100644 --- a/lib/iris/fileformats/_nc_load_rules/helpers.py +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -1,3 +1,8 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. """ All the pure-Python 'helper' functions which previously included in the Pyke rules database. @@ -246,7 +251,7 @@ def _get_ellipsoid(cf_grid_var): ################################################################################ -def build_coordinate_system(cf_grid_var): +def build_coordinate_system(engine, cf_grid_var): """Create a coordinate system from the CF-netCDF grid mapping variable.""" major, minor, inverse_flattening = _get_ellipsoid(cf_grid_var) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/__init__.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/__init__.py new file mode 100644 index 0000000000..8bc429a906 --- /dev/null +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/__init__.py @@ -0,0 +1,6 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +"""Unit tests for the `iris.fileformats.netcdf._load_cube` function.""" diff --git a/lib/iris/tests/unit/fileformats/netcdf/test__load_cube.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube.py similarity index 100% rename from lib/iris/tests/unit/fileformats/netcdf/test__load_cube.py rename to lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube.py diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py new file mode 100644 index 0000000000..4afeeac429 --- /dev/null +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py @@ -0,0 +1,161 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the engine.activate() call within the +`iris.fileformats.netcdf._load_cube` function. + +For now, these tests are designed to function with **either** the "old" +Pyke-rules implementation in :mod:`iris.fileformats._pyke_rules`, **or** the +"new" :mod:`iris.fileformats._nc_load_rules`. +Both of those supply an "activate" call (for now : may be simplified in future). + +""" +import iris.tests as tests + +from pathlib import Path +import shutil +import subprocess +import tempfile + +from iris.fileformats.cf import CFReader +import iris.fileformats.netcdf +from iris.fileformats.netcdf import _load_cube +import iris.fileformats._nc_load_rules.engine + +""" +Testing method. +IN cf : "def _load_cube(engine, cf, cf_var, filename)" +WHERE: + - engine is a :class:`pyke.knowledge_engine.engine` + -- **OR** :class:`iris.fileformats._nc_load_rules.engine.Engine` + - cf is a CFReader + - cf_var is a CFDAtaVariable + +As it's hard to construct a suitable CFReader from scratch, it would seem +simpler (for now) to use an ACTUAL FILE. +Likewise, the easiest approach to that is with CDL and "ncgen". +To do this, we need a test "fixture" that can create suitable test files in a +temporary directory. 
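+
+Roughly, each testcase therefore does (see '_call_with_testfile', below) :
+    * write a CDL string to a temporary "test.cdl"
+    * run "ncgen -o test.nc test.cdl" to create an actual netCDF file
+    * cf = CFReader(nc_path) ; cf_var = cf.cf_group.data_variables["phenom"]
+    * cube = _load_cube(engine, cf, cf_var, nc_path)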
+ +""" + + +class Test__grid_mappings(tests.IrisTest): + @classmethod + def setUpClass(cls): + # # Control which testing method we are applying. + # Create a temp directory for temp files. + cls.temp_dirpath = Path(tempfile.mkdtemp()) + + @classmethod + def tearDownClass(cls): + # Destroy a temp directory for temp files. + shutil.rmtree(cls.temp_dirpath) + + def _call_with_testfile(self): + # FOR NOW: absolutely basic example. + cdl_string = r""" + netcdf test { + dimensions: + lats = 2 ; + lons = 3 ; + variables: + double phenom(lats, lons) ; + phenom:standard_name = "air_temperature" ; + phenom:units = "K" ; + double lats(lats) ; + lats:axis = "Y" ; + lats:units = "degrees_north" ; + lats:standard_name = "latitude" ; + double lons(lons) ; + lons:axis = "X" ; + lons:units = "degrees_east" ; + lons:standard_name = "longitude" ; + } + """ + cdl_string = r""" + netcdf test { + dimensions: + lats = 2 ; + lons = 3 ; + variables: + double phenom(lats, lons) ; + phenom:standard_name = "air_temperature" ; + phenom:units = "K" ; + phenom:grid_mapping = "grid" ; + double lats(lats) ; + lats:axis = "Y" ; + lats:units = "degrees" ; + lats:standard_name = "latitude" ; + double lons(lons) ; + lons:axis = "X" ; + lons:units = "degrees_east" ; + lons:standard_name = "longitude" ; + int grid ; + grid:grid_mapping_name = "latitude_longitude"; + grid:earth_radius = 6.e6 ; + data: + lats = 10., 20. ; + lons = 100., 110., 120. ; + } + """ + cdl_path = str(self.temp_dirpath / "test.cdl") + nc_path = str(self.temp_dirpath / "test.nc") + with open(cdl_path, "w") as f_out: + f_out.write(cdl_string) + # Create reference netCDF file from reference CDL. + command = "ncgen -o {} {}".format(nc_path, cdl_path) + subprocess.check_call(command, shell=True) + + cf = CFReader(nc_path) + # Grab a data variable : FOR NOW, should be only 1 + # (cf_var,) = cf.cf_group.data_variables.values() + cf_var = cf.cf_group.data_variables["phenom"] + + use_pyke = True + if use_pyke: + engine = iris.fileformats.netcdf._pyke_kb_engine_real() + else: + engine = iris.fileformats._nc_load_rules.engine.Engine() + + iris.fileformats.netcdf.DEBUG = True + # iris.fileformats.netcdf.LOAD_PYKE = False + return _load_cube(engine, cf, cf_var, nc_path) + + def _check_result(self, cube): + self.assertEqual(cube.standard_name, "air_temperature") + self.assertEqual(cube.var_name, "phenom") + + def test_latlon(self): + options = {} + result = self._call_with_testfile(**options) + print(result) + print("coord-system = ", type(result.coord_system())) + print(" X cs = ", type(result.coord(axis="x").coord_system)) + print(" Y cs = ", type(result.coord(axis="y").coord_system)) + self._check_result(result, **options) + + +# keep for later ? +_cdl_string = r""" + netcdf test { + dimensions: + latitude = 2 ; + longitude = 3 ; + time = 2 ; + variables: + double phenom(time, latitude) ; + phenom:standard_name = "air_temperature" ; + phenom:units = "K" ; + float time(time) ; + time:units = "1" ; + time:standard_name = "time" ; + double latitude(latitude) ; + latitude:axis = "Y" ; + latitude:units = "1" ; + latitude:standard_name = "latitude" ; + } +""" From 454ac0de56cafad33761257f34f61c77f737537b Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Fri, 21 May 2021 16:45:15 +0100 Subject: [PATCH 04/35] Fix handling of coord-systems. 
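
Roughly : a dimension coordinate whose type is identified with a grid-mapping
should now always pick up the coord-system recorded by the grid-mapping action
in engine.cube_parts["coordinate_system"] (which may be None), rather than
special-casing latitude/longitude.

A minimal, self-contained sketch of the intended end state (not the loader
code itself), reusing the values from the test CDL :

    import numpy as np
    import iris.coords
    import iris.coord_systems

    cs = iris.coord_systems.GeogCS(6.e6)   # 'earth_radius' as in the test CDL
    lat = iris.coords.DimCoord(
        np.array([10.0, 20.0]),
        standard_name="latitude",
        units="degrees_north",
        coord_system=cs,
    )
    assert lat.coord_system == cs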
--- .../fileformats/_nc_load_rules/actions.py | 24 ++++++++--------- .../load_cube/test__load_cube__activate.py | 26 ++----------------- 2 files changed, 13 insertions(+), 37 deletions(-) diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index d2ebefc69e..d23a2373e9 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -245,22 +245,20 @@ def action_provides_coordinate(engine, dimcoord_fact): def action_build_dimension_coordinate(engine, providescoord_fact): coord_type, var_name = providescoord_fact cf_var = engine.cf_var.cf_group[var_name] - rule_name = f"fc_build_coordinate_{coord_type}" + rule_name = f"fc_build_coordinate_({coord_type})" grid_type, coord_name = _coordtype_to_gridtype_coordname[coord_type] - succeed = True coord_system = None if grid_type is not None: - if coord_type not in ("latitude", "longitude"): - # There needs to be the right sort of coordinate system - coord_system = engine.cube_parts.get("coordinate_system") - if coord_system is None: - succeed = False - rule_name += " --FAILED(no coord-system)" - # TODO else: we ***asssume*** coord-system is the right type ?? - if succeed: - hh.build_dimension_coordinate( - engine, cf_var, coord_name=coord_name, coord_system=coord_system - ) + # If a type is identified with a grid, use the coordinate system + # N.B. this requires each grid-type identification to validate the + # coord var (e.g. "is_longitude"). + # Non-conforming lon/lat/projection coords will be classed as + # dim-coords by cf.py, but 'action_provides_coordinate' will give them + # a coord-type of 'miscellaneous' : hence, they have no coord-system. + coord_system = engine.cube_parts.get("coordinate_system") + hh.build_dimension_coordinate( + engine, cf_var, coord_name=coord_name, coord_system=coord_system + ) return rule_name diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py index 4afeeac429..46228b40ae 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py @@ -88,11 +88,11 @@ def _call_with_testfile(self): phenom:grid_mapping = "grid" ; double lats(lats) ; lats:axis = "Y" ; - lats:units = "degrees" ; + lats:units = "degrees_north" ; lats:standard_name = "latitude" ; double lons(lons) ; lons:axis = "X" ; - lons:units = "degrees_east" ; + lons:units = "degrees" ; // THIS IS A BUG! lons:standard_name = "longitude" ; int grid ; grid:grid_mapping_name = "latitude_longitude"; @@ -137,25 +137,3 @@ def test_latlon(self): print(" X cs = ", type(result.coord(axis="x").coord_system)) print(" Y cs = ", type(result.coord(axis="y").coord_system)) self._check_result(result, **options) - - -# keep for later ? -_cdl_string = r""" - netcdf test { - dimensions: - latitude = 2 ; - longitude = 3 ; - time = 2 ; - variables: - double phenom(time, latitude) ; - phenom:standard_name = "air_temperature" ; - phenom:units = "K" ; - float time(time) ; - time:units = "1" ; - time:standard_name = "time" ; - double latitude(latitude) ; - latitude:axis = "Y" ; - latitude:units = "1" ; - latitude:standard_name = "latitude" ; - } -""" From 80baf8755bf3164f8b365e16a57937421db779e9 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Fri, 21 May 2021 18:44:08 +0100 Subject: [PATCH 05/35] Remove extra cdl. 
--- .../load_cube/test__load_cube__activate.py | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py index 46228b40ae..de08b470c8 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py @@ -57,25 +57,6 @@ def tearDownClass(cls): def _call_with_testfile(self): # FOR NOW: absolutely basic example. - cdl_string = r""" - netcdf test { - dimensions: - lats = 2 ; - lons = 3 ; - variables: - double phenom(lats, lons) ; - phenom:standard_name = "air_temperature" ; - phenom:units = "K" ; - double lats(lats) ; - lats:axis = "Y" ; - lats:units = "degrees_north" ; - lats:standard_name = "latitude" ; - double lons(lons) ; - lons:axis = "X" ; - lons:units = "degrees_east" ; - lons:standard_name = "longitude" ; - } - """ cdl_string = r""" netcdf test { dimensions: From 688c4ac19cfbaacdac38a6d4b4ed4e8aa1d53280 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 1 Jun 2021 18:43:45 +0100 Subject: [PATCH 06/35] Add more grid-mapping tests; tidy testcode structure a bit. --- .../fileformats/_nc_load_rules/actions.py | 26 ++- .../load_cube/test__load_cube__activate.py | 179 +++++++++++++++--- 2 files changed, 175 insertions(+), 30 deletions(-) diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index d23a2373e9..8dbc637f2a 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -14,10 +14,10 @@ "engine.assert_case_specific_fact(name, args)". 2) this is also used to store intermediate info passed between rules, which - used to done with a "facts_cf.provides" statement in rule actions. + used to be done with a "facts_cf.provides" statement in rule actions. -3) Iris-specific info is stored in our own additional properties stored in - extra properties added to the engine object : +3) Iris-specific info is (still) stored in additional properties created on + the engine object : engine.cf_var, .cube, .cube_parts, .requires, .rule_triggered, .filename Our "rules" are just action routines. @@ -26,6 +26,17 @@ explicit way, which doesn't use any clever chaining, "trigger conditions" or other rule-type logic. +Each 'action' function can replace several similar 'rules'. +E.G. 'action_provides_grid_mapping' replaces all 'fc_provides_grid+mapping_'. +To aid debug, each returns a 'rule_name' string, indicating which original rule +this particular action call is emulating : In some cases, this may include a +textual note that this rule 'failed', aka "did not trigger", which would not be +recorded in the original implementation. + +The top-level 'run_actions' ensures that the individual rules actions are +called, with various arguments, as appropriate to ensure the whole cube is +built as it was by the original rules implementation. + TODO: remove the use of intermediate "facts" to carry information between actions. This mimics older behaviour, so is still useful while we are still comparing behaviour with the old Pyke rules (debugging). But once that is no @@ -268,7 +279,9 @@ def action_build_auxiliary_coordinate(engine, auxcoord_fact): (var_name,) = auxcoord_fact rule_name = "fc_build_auxiliary_coordinate" - # FOR NOW: attempt to identify type, though it only affects rule-name? 
+ # FOR NOW: attempt to identify type + # TODO: can maybe eventually remove this, as it only affects rule_name. + # (but could possibly retain for future debugging purposes) coord_type = "" # unidentified : can be OK if hh.is_time(engine, var_name): coord_type = "time" @@ -300,11 +313,14 @@ def run_actions(engine): # deal with grid-mappings grid_mapping_facts = engine.fact_list("grid_mapping") + # For now, there should be at most *one* of these. + assert len(grid_mapping_facts) in (0, 1) for grid_mapping_fact in grid_mapping_facts: action_provides_grid_mapping(engine, grid_mapping_fact) # identify + record aka "PROVIDE" specific named coordinates - # N.B. cf.py id-d these as coords NOT aux-coords (stored separately) + # N.B. cf.py has identified that these are dim-coords, NOT aux-coords + # (which are recorded separately). # TODO: can probably remove this step ?? dimcoord_facts = engine.fact_list("coordinate") for dimcoord_fact in dimcoord_facts: diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py index de08b470c8..396d1d6a5e 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py @@ -26,7 +26,8 @@ import iris.fileformats._nc_load_rules.engine """ -Testing method. +Notes on testing method. + IN cf : "def _load_cube(engine, cf, cf_var, filename)" WHERE: - engine is a :class:`pyke.knowledge_engine.engine` @@ -43,7 +44,21 @@ """ -class Test__grid_mappings(tests.IrisTest): +class Mixin_Test__nc_load_actions: + """ + Class to make testcases for rules or actions code and check results. + + Defines standard setUp/tearDown-Class to create intermediate files in a + temporary directory. + + Testcase manufacture in _make_testcase_file', based on a simple latlon grid + example with various kwargs to control variations. + Testing in 'test_result', with various kwargs controlling expected results. + + Can also switch between testing Pyke and non-Pyke implementations (for now). + + """ + @classmethod def setUpClass(cls): # # Control which testing method we are applying. @@ -55,10 +70,33 @@ def tearDownClass(cls): # Destroy a temp directory for temp files. shutil.rmtree(cls.temp_dirpath) - def _call_with_testfile(self): - # FOR NOW: absolutely basic example. - cdl_string = r""" - netcdf test { + def make_testcase_cdl( + self, + cdl_path, + latitude_units=None, + gridmapvar_name=None, + gridmapvar_mappropertyname=None, + gridmapvar_missingradius=False, + ): + """ + Write a testcase example into a CDL file. + """ + if latitude_units is None: + latitude_units = "degrees_north" + grid_mapping_name = "grid" + g_varname = gridmapvar_name + g_mapname = gridmapvar_mappropertyname + if g_varname is None: + g_varname = grid_mapping_name + if g_mapname is None: + g_mapname = "grid_mapping_name" + if gridmapvar_missingradius: + g_radius_string = "" + else: + g_radius_string = f"{g_varname}:earth_radius = 6.e6 ;" + + cdl_string = f""" + netcdf test {{ dimensions: lats = 2 ; lons = 3 ; @@ -69,34 +107,47 @@ def _call_with_testfile(self): phenom:grid_mapping = "grid" ; double lats(lats) ; lats:axis = "Y" ; - lats:units = "degrees_north" ; + lats:units = "{latitude_units}" ; lats:standard_name = "latitude" ; double lons(lons) ; lons:axis = "X" ; - lons:units = "degrees" ; // THIS IS A BUG! 
+ lons:units = "degrees_east" ; lons:standard_name = "longitude" ; - int grid ; - grid:grid_mapping_name = "latitude_longitude"; - grid:earth_radius = 6.e6 ; + int {g_varname} ; + {g_varname}:{g_mapname} = "latitude_longitude"; + {g_radius_string} data: lats = 10., 20. ; lons = 100., 110., 120. ; - } + }} """ - cdl_path = str(self.temp_dirpath / "test.cdl") - nc_path = str(self.temp_dirpath / "test.nc") + # print('File content:') + # print(cdl_string) + # print('------\n') with open(cdl_path, "w") as f_out: f_out.write(cdl_string) + return cdl_path + + def create_cube_from_cdl(self, cdl_path, nc_path, use_pyke=False): + """ + Load the 'phenom' data variable in a CDL testcase, as a cube. + + Using ncgen and the selected _load_cube call. + + FOR NOW: can select whether load uses Pyke (rules) or newer actions + code. + TODO: remove when Pyke implementation is gone. + + """ # Create reference netCDF file from reference CDL. command = "ncgen -o {} {}".format(nc_path, cdl_path) subprocess.check_call(command, shell=True) cf = CFReader(nc_path) # Grab a data variable : FOR NOW, should be only 1 - # (cf_var,) = cf.cf_group.data_variables.values() + cf_var = list(cf.cf_group.data_variables.values())[0] cf_var = cf.cf_group.data_variables["phenom"] - use_pyke = True if use_pyke: engine = iris.fileformats.netcdf._pyke_kb_engine_real() else: @@ -106,15 +157,93 @@ def _call_with_testfile(self): # iris.fileformats.netcdf.LOAD_PYKE = False return _load_cube(engine, cf, cf_var, nc_path) - def _check_result(self, cube): + def _run_testcase(self, **testcase_kwargs): + """ + Run a testcase with chosen optionsm returning a test cube. + + The kwargs apply to the 'make_testcase_cdl' method. + + """ + cdl_path = str(self.temp_dirpath / "test.cdl") + nc_path = cdl_path.replace(".cdl", ".nc") + self.make_testcase_cdl(cdl_path, **testcase_kwargs) + cube = self.create_cube_from_cdl(cdl_path, nc_path) + return cube + + def _check_result(self, cube, cube_no_cs=False, latitude_no_cs=False): + """ + Check key properties of a result cube. + + Various options control the expected things which are tested. + """ self.assertEqual(cube.standard_name, "air_temperature") self.assertEqual(cube.var_name, "phenom") - def test_latlon(self): - options = {} - result = self._call_with_testfile(**options) - print(result) - print("coord-system = ", type(result.coord_system())) - print(" X cs = ", type(result.coord(axis="x").coord_system)) - print(" Y cs = ", type(result.coord(axis="y").coord_system)) - self._check_result(result, **options) + lon_coord = cube.coord("longitude") + lat_coord = cube.coord("latitude") + expected_dim_coords = [lon_coord, lat_coord] + expected_aux_coords = [] + # These are exactly the coords we have. + self.assertEqual( + set(expected_dim_coords), set(cube.coords(dim_coords=True)) + ) + # These are exactly the coords we have. 
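+        # (i.e. in these simple testcases we expect *no* auxiliary coords).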
+ self.assertEqual( + set(expected_aux_coords), set(cube.coords(dim_coords=False)) + ) + + cube_cs = cube.coord_system() + lat_cs = lat_coord.coord_system + lon_cs = lon_coord.coord_system + if cube_no_cs: + self.assertIsNone(cube_cs) + self.assertIsNone(lat_cs) + self.assertIsNone(lon_cs) + else: + self.assertEqual(lon_cs, cube_cs) + if latitude_no_cs: + self.assertIsNone(lat_cs) + else: + self.assertEqual(lat_cs, cube_cs) + + +class Test__grid_mapping(Mixin_Test__nc_load_actions, tests.IrisTest): + @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + + def test_basic_latlon(self): + # A basic reference example with a lat-long grid. + result = self._run_testcase() + self._check_result(result) + + def test_missing_latlon_radius(self): + # Lat-long with a missing earth-radius causes an error. + # One of very few cases where activation may encounter an error. + # N.B. doesn't really test rule-activation, but maybe worth doing. + with self.assertRaisesRegex(ValueError, "No ellipsoid"): + self._run_testcase(gridmapvar_missingradius=True) + + def test_bad_gridmapping_nameproperty(self): + # Fix the 'grid' var so it does not register as a grid-mapping. + result = self._run_testcase(gridmapvar_mappropertyname="mappy") + self._check_result(result, cube_no_cs=True) + + def test_latlon_bad_gridmapping_varname(self): + # rename the grid-mapping variable so it is effectively 'missing'. + with self.assertWarnsRegexp("Missing.*grid mapping variable 'grid'"): + result = self._run_testcase(gridmapvar_name="grid_2") + self._check_result(result, cube_no_cs=True) + + def test_latlon_bad_latlon_unit(self): + # Check with bad latitude units : 'degrees' in place of 'degrees_north'. + result = self._run_testcase(latitude_units="degrees") + self._check_result(result, latitude_no_cs=True) + + +if __name__ == "__main__": + tests.main() From 271f65924ce5947ebcff2a98d8c4c1280fb3cac9 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 2 Jun 2021 00:25:24 +0100 Subject: [PATCH 07/35] Test more grid types : rotated and some non-latlon (WIP). --- .../load_cube/test__load_cube__activate.py | 177 ++++++++++++++---- 1 file changed, 136 insertions(+), 41 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py index 396d1d6a5e..115455191e 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py @@ -20,10 +20,12 @@ import subprocess import tempfile +import iris.coord_systems as ics from iris.fileformats.cf import CFReader import iris.fileformats.netcdf from iris.fileformats.netcdf import _load_cube import iris.fileformats._nc_load_rules.engine +import iris.fileformats._nc_load_rules.helpers as hh """ Notes on testing method. @@ -44,7 +46,7 @@ """ -class Mixin_Test__nc_load_actions: +class Mixin_Test__nc_load_actions(tests.IrisTest): """ Class to make testcases for rules or actions code and check results. @@ -77,12 +79,40 @@ def make_testcase_cdl( gridmapvar_name=None, gridmapvar_mappropertyname=None, gridmapvar_missingradius=False, + mapping_name=None, + use_bad_mapping_params=False, ): """ Write a testcase example into a CDL file. """ - if latitude_units is None: - latitude_units = "degrees_north" + # Grid-mapping options are standard-latlon, rotated, or non-latlon. 
+ # This affects names+units of the X and Y coords. + if mapping_name is None: + # Default grid-mapping and coords are standard lat-lon. + mapping_name = hh.CF_GRID_MAPPING_LAT_LON + xco_name = hh.CF_VALUE_STD_NAME_LON + yco_name = hh.CF_VALUE_STD_NAME_LAT + xco_units = "degrees_east" + # Special cases override some of the values. + if latitude_units is None: + yco_units = "degrees_north" + else: + # Override the latitude units (to invalidate). + yco_units = latitude_units + + elif mapping_name == hh.CF_GRID_MAPPING_ROTATED_LAT_LON: + xco_name = hh.CF_VALUE_STD_NAME_GRID_LON + yco_name = hh.CF_VALUE_STD_NAME_GRID_LAT + xco_units = "degrees" + yco_units = "degrees" + else: + # General non-latlon coordinates + # Exactly which depends on the grid_mapping name. + xco_name = hh.CF_VALUE_STD_NAME_PROJ_X + yco_name = hh.CF_VALUE_STD_NAME_PROJ_Y + xco_units = "m" + yco_units = "m" + grid_mapping_name = "grid" g_varname = gridmapvar_name g_mapname = gridmapvar_mappropertyname @@ -94,41 +124,58 @@ def make_testcase_cdl( g_radius_string = "" else: g_radius_string = f"{g_varname}:earth_radius = 6.e6 ;" + g_string = f""" + int {g_varname} ; + {g_varname}:{g_mapname} = "{mapping_name}"; + {g_radius_string} + """ + if use_bad_mapping_params: + if mapping_name == hh.CF_GRID_MAPPING_MERCATOR: + # Mercator mapping with nonzero false-easting is unsupported. + g_string += f""" + {g_varname}:{hh.CF_ATTR_GRID_FALSE_EASTING} = 1.0 ; + """ + elif False: + pass + else: + # Key is only valid for specific grid-mappings. + assert mapping_name in ( + hh.CF_GRID_MAPPING_MERCATOR, + hh.CF_GRID_MAPPING_STEREO, + ) cdl_string = f""" netcdf test {{ dimensions: - lats = 2 ; - lons = 3 ; + yco = 2 ; + xco = 3 ; variables: - double phenom(lats, lons) ; + double phenom(yco, xco) ; phenom:standard_name = "air_temperature" ; phenom:units = "K" ; phenom:grid_mapping = "grid" ; - double lats(lats) ; - lats:axis = "Y" ; - lats:units = "{latitude_units}" ; - lats:standard_name = "latitude" ; - double lons(lons) ; - lons:axis = "X" ; - lons:units = "degrees_east" ; - lons:standard_name = "longitude" ; - int {g_varname} ; - {g_varname}:{g_mapname} = "latitude_longitude"; - {g_radius_string} + double yco(yco) ; + yco:axis = "Y" ; + yco:units = "{yco_units}" ; + yco:standard_name = "{yco_name}" ; + double xco(xco) ; + xco:axis = "X" ; + xco:units = "{xco_units}" ; + xco:standard_name = "{xco_name}" ; + {g_string} data: - lats = 10., 20. ; - lons = 100., 110., 120. ; + yco = 10., 20. ; + xco = 100., 110., 120. ; }} """ - # print('File content:') - # print(cdl_string) - # print('------\n') + print("File content:") + print(cdl_string) + print("------\n") with open(cdl_path, "w") as f_out: f_out.write(cdl_string) return cdl_path - def create_cube_from_cdl(self, cdl_path, nc_path, use_pyke=False): + def create_cube_from_cdl(self, cdl_path, nc_path, use_pyke=True): """ Load the 'phenom' data variable in a CDL testcase, as a cube. @@ -157,7 +204,7 @@ def create_cube_from_cdl(self, cdl_path, nc_path, use_pyke=False): # iris.fileformats.netcdf.LOAD_PYKE = False return _load_cube(engine, cf, cf_var, nc_path) - def _run_testcase(self, **testcase_kwargs): + def run_testcase(self, **testcase_kwargs): """ Run a testcase with chosen optionsm returning a test cube. 
@@ -168,9 +215,19 @@ def _run_testcase(self, **testcase_kwargs): nc_path = cdl_path.replace(".cdl", ".nc") self.make_testcase_cdl(cdl_path, **testcase_kwargs) cube = self.create_cube_from_cdl(cdl_path, nc_path) + print("\nCube:") + print(cube) + print("") return cube - def _check_result(self, cube, cube_no_cs=False, latitude_no_cs=False): + def check_result( + self, + cube, + cube_cstype=None, + cube_no_cs=False, + cube_no_xycoords=False, + latitude_no_cs=False, + ): """ Check key properties of a result cube. @@ -179,27 +236,43 @@ def _check_result(self, cube, cube_no_cs=False, latitude_no_cs=False): self.assertEqual(cube.standard_name, "air_temperature") self.assertEqual(cube.var_name, "phenom") - lon_coord = cube.coord("longitude") - lat_coord = cube.coord("latitude") - expected_dim_coords = [lon_coord, lat_coord] - expected_aux_coords = [] - # These are exactly the coords we have. + x_coords = cube.coords(axis="x") + y_coords = cube.coords(axis="y") + expected_dim_coords = x_coords + y_coords self.assertEqual( set(expected_dim_coords), set(cube.coords(dim_coords=True)) ) # These are exactly the coords we have. + if cube_no_xycoords: + self.assertEqual(expected_dim_coords, []) + x_coord = None + y_coord = None + else: + self.assertEqual(len(x_coords), 1) + (x_coord,) = x_coords + self.assertEqual(len(y_coords), 1) + (y_coord,) = y_coords + + expected_aux_coords = [] + # These are exactly the coords we have. self.assertEqual( set(expected_aux_coords), set(cube.coords(dim_coords=False)) ) cube_cs = cube.coord_system() - lat_cs = lat_coord.coord_system - lon_cs = lon_coord.coord_system + if cube_no_xycoords: + lat_cs = None + lon_cs = None + else: + lat_cs = y_coord.coord_system + lon_cs = x_coord.coord_system if cube_no_cs: self.assertIsNone(cube_cs) self.assertIsNone(lat_cs) self.assertIsNone(lon_cs) else: + if cube_cstype is not None: + self.assertIsInstance(cube_cs, cube_cstype) self.assertEqual(lon_cs, cube_cs) if latitude_no_cs: self.assertIsNone(lat_cs) @@ -218,31 +291,53 @@ def tearDownClass(cls): def test_basic_latlon(self): # A basic reference example with a lat-long grid. - result = self._run_testcase() - self._check_result(result) + result = self.run_testcase() + self.check_result(result) def test_missing_latlon_radius(self): # Lat-long with a missing earth-radius causes an error. # One of very few cases where activation may encounter an error. # N.B. doesn't really test rule-activation, but maybe worth doing. with self.assertRaisesRegex(ValueError, "No ellipsoid"): - self._run_testcase(gridmapvar_missingradius=True) + self.run_testcase(gridmapvar_missingradius=True) def test_bad_gridmapping_nameproperty(self): # Fix the 'grid' var so it does not register as a grid-mapping. - result = self._run_testcase(gridmapvar_mappropertyname="mappy") - self._check_result(result, cube_no_cs=True) + result = self.run_testcase(gridmapvar_mappropertyname="mappy") + self.check_result(result, cube_no_cs=True) def test_latlon_bad_gridmapping_varname(self): # rename the grid-mapping variable so it is effectively 'missing'. with self.assertWarnsRegexp("Missing.*grid mapping variable 'grid'"): - result = self._run_testcase(gridmapvar_name="grid_2") - self._check_result(result, cube_no_cs=True) + result = self.run_testcase(gridmapvar_name="grid_2") + self.check_result(result, cube_no_cs=True) def test_latlon_bad_latlon_unit(self): # Check with bad latitude units : 'degrees' in place of 'degrees_north'. 
- result = self._run_testcase(latitude_units="degrees") - self._check_result(result, latitude_no_cs=True) + result = self.run_testcase(latitude_units="degrees") + self.check_result(result, latitude_no_cs=True) + + def test_mapping_rotated(self): + result = self.run_testcase( + mapping_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON + ) + self.check_result(result, cube_cstype=ics.RotatedGeogCS) + + def test_mapping_albers(self): + result = self.run_testcase(mapping_name=hh.CF_GRID_MAPPING_ALBERS) + self.check_result(result, cube_cstype=ics.AlbersEqualArea) + + def test_mapping_mercator(self): + result = self.run_testcase(mapping_name=hh.CF_GRID_MAPPING_MERCATOR) + self.check_result(result, cube_cstype=ics.Mercator) + + def test_mapping_mercator__fail_unsupported(self): + with self.assertWarnsRegexp("not yet supported for Mercator"): + result = self.run_testcase( + mapping_name=hh.CF_GRID_MAPPING_MERCATOR, + use_bad_mapping_params=True, + ) + self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) if __name__ == "__main__": From de039f5f2fa4ff0e08d62e625d478679571d8174 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 2 Jun 2021 10:21:51 +0100 Subject: [PATCH 08/35] Fix 'checker' call usage. --- lib/iris/fileformats/_nc_load_rules/actions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index 8dbc637f2a..38b2b96d51 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -162,7 +162,7 @@ def action_provides_grid_mapping(engine, gridmapping_fact): # succeed = False # rule_name += f' --(FAILED is_grid_mapping)' if succeed: - if checker is not None and not checker(engine, grid_mapping_type): + if checker is not None and not checker(engine, var_name): succeed = False rule_name += f" --(FAILED check {checker.__name__})" From e1c56cd532c0a8ac4b1bf3bb8d87dfd29a888363 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 2 Jun 2021 10:41:49 +0100 Subject: [PATCH 09/35] Reorganise testcode inheritance to provide testclasses for pyke/nonpyke. Replace 'bad mapping' control with scale-factor key. **NOTE** non-pyke bad mercatorhas outstanding test failures -- to be fixed. --- .../load_cube/test__load_cube__activate.py | 129 ++++++++++++------ 1 file changed, 86 insertions(+), 43 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py index 115455191e..3942f4bcb6 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py @@ -46,21 +46,45 @@ """ -class Mixin_Test__nc_load_actions(tests.IrisTest): +class Mixin_Test__nc_load_actions: """ - Class to make testcases for rules or actions code and check results. + Class to make testcases for rules or actions code, and check results. - Defines standard setUp/tearDown-Class to create intermediate files in a - temporary directory. + Defines standard setUpClass/tearDownClass methods, to create a temporary + directory for intermediate files. + NOTE: owing to peculiarities of unittest, these must be explicitly called + from a setUpClass/tearDownClass within the 'final' inheritor, i.e. the + actual Test_XXX class which also inherits unittest.TestCase. 
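+
+    For example, a 'final' test class follows this pattern (matching the
+    concrete classes defined further down) :
+
+        class Test__grid_mapping__pyke_rules(Mixin__grid_mapping, tests.IrisTest):
+            use_pyke = True
+
+            @classmethod
+            def setUpClass(cls):
+                super().setUpClass()
+
+            @classmethod
+            def tearDownClass(cls):
+                super().tearDownClass()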
- Testcase manufacture in _make_testcase_file', based on a simple latlon grid - example with various kwargs to control variations. - Testing in 'test_result', with various kwargs controlling expected results. + Testcases are manufactured by the '_make_testcase_cdl' method. + These are based on a 'standard simple latlon grid' example. + Various kwargs control variations on this. - Can also switch between testing Pyke and non-Pyke implementations (for now). + The 'run_testcase' method takes the '_make_testcase_cdl' kwargs and makes + a result cube (by: producing cdl, converting to netcdf, and loading). + + The 'check_result' method performs various checks on the result, with + kwargs controlling the expected properties to be tested against. + This usage is *also* based on the 'standard simple latlon grid' example, + the kwargs specify expected differences from that. + + Can also test with either the Pyke(rules) or non-Pyke (actions) + implementations (for now). """ + # + # "global" test settings + # + + # whether to test 'rules' or 'actions' implementations + # TODO: remove when Pyke is gone + use_pyke = True + + # whether to output various debug info + # TODO: ?possibly? remove when development is complete + debug = False + @classmethod def setUpClass(cls): # # Control which testing method we are applying. @@ -72,7 +96,7 @@ def tearDownClass(cls): # Destroy a temp directory for temp files. shutil.rmtree(cls.temp_dirpath) - def make_testcase_cdl( + def _make_testcase_cdl( self, cdl_path, latitude_units=None, @@ -80,7 +104,7 @@ def make_testcase_cdl( gridmapvar_mappropertyname=None, gridmapvar_missingradius=False, mapping_name=None, - use_bad_mapping_params=False, + mapping_scalefactor=None, ): """ Write a testcase example into a CDL file. @@ -129,20 +153,13 @@ def make_testcase_cdl( {g_varname}:{g_mapname} = "{mapping_name}"; {g_radius_string} """ - if use_bad_mapping_params: - if mapping_name == hh.CF_GRID_MAPPING_MERCATOR: - # Mercator mapping with nonzero false-easting is unsupported. - g_string += f""" - {g_varname}:{hh.CF_ATTR_GRID_FALSE_EASTING} = 1.0 ; - """ - elif False: - pass - else: - # Key is only valid for specific grid-mappings. - assert mapping_name in ( - hh.CF_GRID_MAPPING_MERCATOR, - hh.CF_GRID_MAPPING_STEREO, - ) + if mapping_scalefactor is not None: + # Add a specific scale-factor term to the grid mappinf. + # Non-unity scale not supported for Mercator/Stereographic. + sfapo_name = hh.CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN + g_string += f""" + {g_varname}:{sfapo_name} = {mapping_scalefactor} ; + """ cdl_string = f""" netcdf test {{ @@ -168,23 +185,20 @@ def make_testcase_cdl( xco = 100., 110., 120. ; }} """ - print("File content:") - print(cdl_string) - print("------\n") + if self.debug: + print("File content:") + print(cdl_string) + print("------\n") with open(cdl_path, "w") as f_out: f_out.write(cdl_string) return cdl_path - def create_cube_from_cdl(self, cdl_path, nc_path, use_pyke=True): + def _load_cube_from_cdl(self, cdl_path, nc_path): """ Load the 'phenom' data variable in a CDL testcase, as a cube. Using ncgen and the selected _load_cube call. - FOR NOW: can select whether load uses Pyke (rules) or newer actions - code. - TODO: remove when Pyke implementation is gone. - """ # Create reference netCDF file from reference CDL. 
command = "ncgen -o {} {}".format(nc_path, cdl_path) @@ -195,12 +209,12 @@ def create_cube_from_cdl(self, cdl_path, nc_path, use_pyke=True): cf_var = list(cf.cf_group.data_variables.values())[0] cf_var = cf.cf_group.data_variables["phenom"] - if use_pyke: + if self.use_pyke: engine = iris.fileformats.netcdf._pyke_kb_engine_real() else: engine = iris.fileformats._nc_load_rules.engine.Engine() - iris.fileformats.netcdf.DEBUG = True + iris.fileformats.netcdf.DEBUG = self.debug # iris.fileformats.netcdf.LOAD_PYKE = False return _load_cube(engine, cf, cf_var, nc_path) @@ -208,16 +222,17 @@ def run_testcase(self, **testcase_kwargs): """ Run a testcase with chosen optionsm returning a test cube. - The kwargs apply to the 'make_testcase_cdl' method. + The kwargs apply to the '_make_testcase_cdl' method. """ cdl_path = str(self.temp_dirpath / "test.cdl") nc_path = cdl_path.replace(".cdl", ".nc") - self.make_testcase_cdl(cdl_path, **testcase_kwargs) - cube = self.create_cube_from_cdl(cdl_path, nc_path) - print("\nCube:") - print(cube) - print("") + self._make_testcase_cdl(cdl_path, **testcase_kwargs) + cube = self._load_cube_from_cdl(cdl_path, nc_path) + if self.debug: + print("\nCube:") + print(cube) + print("") return cube def check_result( @@ -280,7 +295,8 @@ def check_result( self.assertEqual(lat_cs, cube_cs) -class Test__grid_mapping(Mixin_Test__nc_load_actions, tests.IrisTest): +class Mixin__grid_mapping(Mixin_Test__nc_load_actions): + # Various tests for translation of grid=mappings @classmethod def setUpClass(cls): super().setUpClass() @@ -297,7 +313,7 @@ def test_basic_latlon(self): def test_missing_latlon_radius(self): # Lat-long with a missing earth-radius causes an error. # One of very few cases where activation may encounter an error. - # N.B. doesn't really test rule-activation, but maybe worth doing. + # N.B. doesn't really test rules-activation, but maybe worth doing. with self.assertRaisesRegex(ValueError, "No ellipsoid"): self.run_testcase(gridmapvar_missingradius=True) @@ -333,12 +349,39 @@ def test_mapping_mercator(self): def test_mapping_mercator__fail_unsupported(self): with self.assertWarnsRegexp("not yet supported for Mercator"): + # Set a non-unity scale factor, which mercator cannot handle. result = self.run_testcase( mapping_name=hh.CF_GRID_MAPPING_MERCATOR, - use_bad_mapping_params=True, + mapping_scalefactor=2.0, ) self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) +class Test__grid_mapping__pyke_rules(Mixin__grid_mapping, tests.IrisTest): + # Various tests for translation of grid=mappings + use_pyke = True + + @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + + +class Test__grid_mapping__nonpyke_actions(Mixin__grid_mapping, tests.IrisTest): + # Various tests for translation of grid=mappings + use_pyke = False + + @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + + if __name__ == "__main__": tests.main() From 44c214f2a45748dd8d170ea11fbcd62cc686c2e2 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 2 Jun 2021 12:32:42 +0100 Subject: [PATCH 10/35] Tests for all supported grid-mappings. Note which rules trigger in each case. 
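
In rough summary (an illustrative sketch, not code from the patch) : the
grid-mappings now exercised, keyed by the hh.CF_GRID_MAPPING_* constants used
in the tests, with the iris.coord_systems classes that check_result expects :

    import iris.coord_systems as ics
    import iris.fileformats._nc_load_rules.helpers as hh

    EXPECTED_CS_CLASSES = {   # name invented here, for illustration only
        hh.CF_GRID_MAPPING_ROTATED_LAT_LON: ics.RotatedGeogCS,
        hh.CF_GRID_MAPPING_ALBERS: ics.AlbersEqualArea,
        hh.CF_GRID_MAPPING_GEOSTATIONARY: ics.Geostationary,
        hh.CF_GRID_MAPPING_LAMBERT_AZIMUTHAL: ics.LambertAzimuthalEqualArea,
        hh.CF_GRID_MAPPING_LAMBERT_CONFORMAL: ics.LambertConformal,
        hh.CF_GRID_MAPPING_MERCATOR: ics.Mercator,
        hh.CF_GRID_MAPPING_STEREO: ics.Stereographic,
        hh.CF_GRID_MAPPING_TRANSVERSE: ics.TransverseMercator,
        hh.CF_GRID_MAPPING_VERTICAL: ics.VerticalPerspective,
    }

The Mercator and Stereographic cases also get '__fail_unsupported' variants,
where a non-unity scale factor makes the coord-system (and hence the x/y
dim-coords) get dropped.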
--- .../load_cube/test__load_cube__activate.py | 201 +++++++++++++++++- 1 file changed, 191 insertions(+), 10 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py index 3942f4bcb6..44c1e55dc8 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py @@ -125,10 +125,12 @@ def _make_testcase_cdl( yco_units = latitude_units elif mapping_name == hh.CF_GRID_MAPPING_ROTATED_LAT_LON: + # Rotated lat-lon coordinates. xco_name = hh.CF_VALUE_STD_NAME_GRID_LON yco_name = hh.CF_VALUE_STD_NAME_GRID_LAT xco_units = "degrees" yco_units = "degrees" + else: # General non-latlon coordinates # Exactly which depends on the grid_mapping name. @@ -153,6 +155,8 @@ def _make_testcase_cdl( {g_varname}:{g_mapname} = "{mapping_name}"; {g_radius_string} """ + + # Add a specified scale-factor if keyword is set if mapping_scalefactor is not None: # Add a specific scale-factor term to the grid mappinf. # Non-unity scale not supported for Mercator/Stereographic. @@ -161,6 +165,54 @@ def _make_testcase_cdl( {g_varname}:{sfapo_name} = {mapping_scalefactor} ; """ + # + # Add various minimal required properties for different grid mappings + # + + # Those which require 'latitude of projection origin' + if mapping_name in ( + hh.CF_GRID_MAPPING_TRANSVERSE, + hh.CF_GRID_MAPPING_STEREO, + hh.CF_GRID_MAPPING_GEOSTATIONARY, + hh.CF_GRID_MAPPING_VERTICAL, + ): + latpo_name = hh.CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN + g_string += f""" + {g_varname}:{latpo_name} = 0.0 ; + """ + # Those which require 'longitude of projection origin' + if mapping_name in ( + hh.CF_GRID_MAPPING_STEREO, + hh.CF_GRID_MAPPING_GEOSTATIONARY, + hh.CF_GRID_MAPPING_VERTICAL, + ): + lonpo_name = hh.CF_ATTR_GRID_LON_OF_PROJ_ORIGIN + g_string += f""" + {g_varname}:{lonpo_name} = 0.0 ; + """ + # Those which require 'longitude of central meridian' + if mapping_name in (hh.CF_GRID_MAPPING_TRANSVERSE,): + latcm_name = hh.CF_ATTR_GRID_LON_OF_CENT_MERIDIAN + g_string += f""" + {g_varname}:{latcm_name} = 0.0 ; + """ + # Those which require 'perspective point height' + if mapping_name in ( + hh.CF_GRID_MAPPING_VERTICAL, + hh.CF_GRID_MAPPING_GEOSTATIONARY, + ): + pph_name = hh.CF_ATTR_GRID_PERSPECTIVE_HEIGHT + g_string += f""" + {g_varname}:{pph_name} = 600000.0 ; + """ + # Those which require 'sweep angle axis' + if mapping_name in (hh.CF_GRID_MAPPING_GEOSTATIONARY,): + saa_name = hh.CF_ATTR_GRID_SWEEP_ANGLE_AXIS + g_string += f""" + {g_varname}:{saa_name} = "y" ; + """ + + # Construct the total CDL string cdl_string = f""" netcdf test {{ dimensions: @@ -296,17 +348,21 @@ def check_result( class Mixin__grid_mapping(Mixin_Test__nc_load_actions): - # Various tests for translation of grid=mappings - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() + # Various testcases for translation of grid-mappings def test_basic_latlon(self): # A basic reference example with a lat-long grid. 
+ # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_latitude_longitude + # 003 : fc_provides_coordinate_latitude + # 004 : fc_provides_coordinate_longitude + # 005 : fc_build_coordinate_latitude + # 006 : fc_build_coordinate_longitude + # Notes: + # grid-mapping: regular latlon + # dim-coords: lat+lon + # coords-build: standard latlon coords (with latlon coord-system) result = self.run_testcase() self.check_result(result) @@ -314,40 +370,137 @@ def test_missing_latlon_radius(self): # Lat-long with a missing earth-radius causes an error. # One of very few cases where activation may encounter an error. # N.B. doesn't really test rules-activation, but maybe worth doing. + # (no rules trigger) with self.assertRaisesRegex(ValueError, "No ellipsoid"): self.run_testcase(gridmapvar_missingradius=True) def test_bad_gridmapping_nameproperty(self): # Fix the 'grid' var so it does not register as a grid-mapping. + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_coordinate_latitude + # 003 : fc_provides_coordinate_longitude + # 004 : fc_build_coordinate_latitude_nocs + # 005 : fc_build_coordinate_longitude_nocs + # Notes: + # grid-mapping: NONE + # dim-coords: lat+lon + # coords-build: latlon coords NO coord-system result = self.run_testcase(gridmapvar_mappropertyname="mappy") self.check_result(result, cube_no_cs=True) def test_latlon_bad_gridmapping_varname(self): # rename the grid-mapping variable so it is effectively 'missing'. + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_coordinate_latitude + # 003 : fc_provides_coordinate_longitude + # 004 : fc_build_coordinate_latitude_nocs + # 005 : fc_build_coordinate_longitude_nocs + # Notes: + # no coord-system + # all the same as test_bad_gridmapping_nameproperty with self.assertWarnsRegexp("Missing.*grid mapping variable 'grid'"): result = self.run_testcase(gridmapvar_name="grid_2") self.check_result(result, cube_no_cs=True) def test_latlon_bad_latlon_unit(self): # Check with bad latitude units : 'degrees' in place of 'degrees_north'. + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_latitude_longitude + # 003 : fc_provides_coordinate_longitude + # 004 : fc_build_coordinate_longitude + # 005 : fc_default_coordinate + # Notes: + # grid-mapping: regular latlon + # dim-coords: + # x is regular longitude dim-coord + # y is 'default' coord ==> builds as an 'extra' dim-coord + # coords-build: + # x(lon) is regular latlon with coord-system + # y(lat) is a dim-coord, but NO coord-system result = self.run_testcase(latitude_units="degrees") self.check_result(result, latitude_no_cs=True) def test_mapping_rotated(self): + # Test with rotated-latlon grid-mapping + # Distinct from both regular-latlon and non-latlon cases, as the + # coordinate standard names and units are different. + # (run_testcase/_make_testcase_cdl know how to handle that). + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_rotated_latitude_longitude + # 003 : fc_provides_coordinate_latitude + # 004 : fc_provides_coordinate_longitude + # 005 : fc_build_coordinate_latitude_rotated + # 006 : fc_build_coordinate_longitude_rotated + # Notes: + # grid-mapping: rotated lat-lon + # dim-coords: lat+lon + # coords-build: lat+lon coords ROTATED, with coord-system + # (rotated means different name + units) result = self.run_testcase( mapping_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON ) self.check_result(result, cube_cstype=ics.RotatedGeogCS) + # + # All non-latlon coordinate systems ... 
+ # These all have projection-x/y coordinates with units of metres + # They all work the same way, except that Mercator/Stereographic have + # parameter checking routines that can fail. + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_ + # 003 : fc_provides_projection_x_coordinate + # 004 : fc_provides_projection_y_coordinate + # 005 : fc_build_coordinate_projection_x_ + # 006 : fc_build_coordinate_projection_y_ + # Notes: + # grid-mapping: + # dim-coords: proj-x and -y + # coords-build: proj-x/-y_, with coord-system + def test_mapping_albers(self): result = self.run_testcase(mapping_name=hh.CF_GRID_MAPPING_ALBERS) self.check_result(result, cube_cstype=ics.AlbersEqualArea) + def test_mapping_geostationary(self): + result = self.run_testcase( + mapping_name=hh.CF_GRID_MAPPING_GEOSTATIONARY + ) + self.check_result(result, cube_cstype=ics.Geostationary) + + def test_mapping_lambert_azimuthal(self): + result = self.run_testcase( + mapping_name=hh.CF_GRID_MAPPING_LAMBERT_AZIMUTHAL + ) + self.check_result(result, cube_cstype=ics.LambertAzimuthalEqualArea) + + def test_mapping_lambert_conformal(self): + result = self.run_testcase( + mapping_name=hh.CF_GRID_MAPPING_LAMBERT_CONFORMAL + ) + self.check_result(result, cube_cstype=ics.LambertConformal) + def test_mapping_mercator(self): result = self.run_testcase(mapping_name=hh.CF_GRID_MAPPING_MERCATOR) self.check_result(result, cube_cstype=ics.Mercator) def test_mapping_mercator__fail_unsupported(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_projection_x_coordinate + # 003 : fc_provides_projection_y_coordinate + # Notes: + # grid-mapping: NONE + # dim-coords: proj-x and -y + # coords-build: NONE + # = NO coord-system + # = NO dim-coords built (cube has no coords) with self.assertWarnsRegexp("not yet supported for Mercator"): # Set a non-unity scale factor, which mercator cannot handle. result = self.run_testcase( @@ -356,9 +509,37 @@ def test_mapping_mercator__fail_unsupported(self): ) self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) + def test_mapping_stereographic(self): + result = self.run_testcase(mapping_name=hh.CF_GRID_MAPPING_STEREO) + self.check_result(result, cube_cstype=ics.Stereographic) + + def test_mapping_stereographic__fail_unsupported(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_projection_x_coordinate + # 003 : fc_provides_projection_y_coordinate + # Notes: + # as for 'mercator__fail_unsupported', above + # = NO dim-coords built (cube has no coords) + with self.assertWarnsRegexp("not yet supported for stereographic"): + # Set a non-unity scale factor, which stereo cannot handle. 
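# A summary sketch, for orientation only: the per-projection testcases in
# this class assert roughly the following pairings between CF grid-mapping
# names and the Iris coord-system classes expected on the loaded cube.
# (This just restates the expectations encoded in the tests; it is not
# itself part of the test code.)
import iris.coord_systems as ics
import iris.fileformats._nc_load_rules.helpers as hh

EXPECTED_CS_CLASSES = {
    hh.CF_GRID_MAPPING_ALBERS: ics.AlbersEqualArea,
    hh.CF_GRID_MAPPING_GEOSTATIONARY: ics.Geostationary,
    hh.CF_GRID_MAPPING_LAMBERT_AZIMUTHAL: ics.LambertAzimuthalEqualArea,
    hh.CF_GRID_MAPPING_LAMBERT_CONFORMAL: ics.LambertConformal,
    hh.CF_GRID_MAPPING_MERCATOR: ics.Mercator,
    hh.CF_GRID_MAPPING_STEREO: ics.Stereographic,
    hh.CF_GRID_MAPPING_TRANSVERSE: ics.TransverseMercator,
    hh.CF_GRID_MAPPING_VERTICAL: ics.VerticalPerspective,
}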
+ result = self.run_testcase( + mapping_name=hh.CF_GRID_MAPPING_STEREO, + mapping_scalefactor=2.0, + ) + self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) + + def test_mapping_transverse_mercator(self): + result = self.run_testcase(mapping_name=hh.CF_GRID_MAPPING_TRANSVERSE) + self.check_result(result, cube_cstype=ics.TransverseMercator) + + def test_mapping_vertical_perspective(self): + result = self.run_testcase(mapping_name=hh.CF_GRID_MAPPING_VERTICAL) + self.check_result(result, cube_cstype=ics.VerticalPerspective) + class Test__grid_mapping__pyke_rules(Mixin__grid_mapping, tests.IrisTest): - # Various tests for translation of grid=mappings + # Run grid-mapping tests with Pyke (rules) use_pyke = True @classmethod @@ -371,7 +552,7 @@ def tearDownClass(cls): class Test__grid_mapping__nonpyke_actions(Mixin__grid_mapping, tests.IrisTest): - # Various tests for translation of grid=mappings + # Run grid-mapping tests with non-Pyke (actions) use_pyke = False @classmethod From 77a1d95322e776329e6976d306962182436f5a79 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 2 Jun 2021 14:23:44 +0100 Subject: [PATCH 11/35] Disable testing against non-pyke code, for now. --- .../fileformats/netcdf/load_cube/test__load_cube__activate.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py index 44c1e55dc8..62fe79a47f 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py @@ -551,6 +551,10 @@ def tearDownClass(cls): super().tearDownClass() +from unittest import skip + + +@skip class Test__grid_mapping__nonpyke_actions(Mixin__grid_mapping, tests.IrisTest): # Run grid-mapping tests with non-Pyke (actions) use_pyke = False From 2fe9bc92459cd0ad2d61bfa8ca2e4ce052ff9b96 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 2 Jun 2021 14:42:25 +0100 Subject: [PATCH 12/35] Revert old tests/test_netcdf.py --- lib/iris/tests/test_netcdf.py | 19 ------------------- 1 file changed, 19 deletions(-) diff --git a/lib/iris/tests/test_netcdf.py b/lib/iris/tests/test_netcdf.py index 9fc7fdefc1..2d1b4a53d5 100644 --- a/lib/iris/tests/test_netcdf.py +++ b/lib/iris/tests/test_netcdf.py @@ -41,13 +41,9 @@ @tests.skip_data class TestNetCDFLoad(tests.IrisTest): def setUp(self): - iris.fileformats.netcdf.DEBUG = True - iris.fileformats.netcdf.LOAD_PYKE = False self.tmpdir = None def tearDown(self): - iris.fileformats.netcdf.DEBUG = False - iris.fileformats.netcdf.LOAD_PYKE = True if self.tmpdir is not None: shutil.rmtree(self.tmpdir) @@ -131,24 +127,11 @@ def test_load_global_xyzt_gems_iter(self): def test_load_rotated_xy_land(self): # Test loading single xy rotated pole CF-netCDF file. - iris.fileformats.netcdf.LOAD_PYKE = True - print("Pyke version:") cube = iris.load_cube( tests.get_data_path( ("NetCDF", "rotated", "xy", "rotPole_landAreaFraction.nc") ) ) - print(cube) - iris.fileformats.netcdf.LOAD_PYKE = False - print("") - print("NON-Pyke version:") - cube = iris.load_cube( - tests.get_data_path( - ("NetCDF", "rotated", "xy", "rotPole_landAreaFraction.nc") - ) - ) - print(cube) - # Make sure the AuxCoords have lazy data. 
self.assertTrue(is_lazy_data(cube.coord("latitude").core_points())) self.assertCML(cube, ("netcdf", "netcdf_rotated_xy_land.cml")) @@ -628,8 +611,6 @@ def test_no_name_cube(self): class TestNetCDFSave(tests.IrisTest): def setUp(self): - iris.fileformats.netcdf.DEBUG = True - iris.fileformats.netcdf.LOAD_PYKE = False self.cubell = iris.cube.Cube( np.arange(4).reshape(2, 2), "air_temperature" ) From 3a467be7c174def40a0983890697bd215100b4b0 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 2 Jun 2021 15:17:41 +0100 Subject: [PATCH 13/35] Odd clarifying comments. --- .../load_cube/test__load_cube__activate.py | 41 +++++++++++++++---- 1 file changed, 33 insertions(+), 8 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py index 62fe79a47f..cd62c9b481 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py @@ -108,16 +108,29 @@ def _make_testcase_cdl( ): """ Write a testcase example into a CDL file. + + This is the "master" routine for creating all our testcases. + Kwarg options modify a simple default testcase with a latlon grid. + The routine handles the various testcase options and their possible + interactions. This includes knowing what extra changes are required + to support different grid-mapping types (for example). + """ - # Grid-mapping options are standard-latlon, rotated, or non-latlon. + # The grid-mapping options are standard-latlon, rotated, or non-latlon. # This affects names+units of the X and Y coords. + # We don't have an option to *not* include a grid-mapping variable, but + # we can mimic a missing grid-mapping by changing the varname from that + # which the data-variable refers to, with "gridmapvar_name=xxx". + # Likewise, an invalid (unrecognised) grid-mapping can be mimicked by + # selecting an unkown 'grid_mapping_name' property, with + # "gridmapvar_mappropertyname=xxx". if mapping_name is None: # Default grid-mapping and coords are standard lat-lon. mapping_name = hh.CF_GRID_MAPPING_LAT_LON xco_name = hh.CF_VALUE_STD_NAME_LON yco_name = hh.CF_VALUE_STD_NAME_LAT xco_units = "degrees_east" - # Special cases override some of the values. + # Special kwarg overrides some of the values. if latitude_units is None: yco_units = "degrees_north" else: @@ -140,12 +153,16 @@ def _make_testcase_cdl( yco_units = "m" grid_mapping_name = "grid" + # Options can override the gridvar name, and its 'grid+mapping_name' + # property. g_varname = gridmapvar_name g_mapname = gridmapvar_mappropertyname if g_varname is None: g_varname = grid_mapping_name if g_mapname is None: g_mapname = "grid_mapping_name" + + # Omit the earth radius, if requested. if gridmapvar_missingradius: g_radius_string = "" else: @@ -156,17 +173,18 @@ def _make_testcase_cdl( {g_radius_string} """ - # Add a specified scale-factor if keyword is set + # Add a specified scale-factor, if requested. if mapping_scalefactor is not None: - # Add a specific scale-factor term to the grid mappinf. - # Non-unity scale not supported for Mercator/Stereographic. + # Add a specific scale-factor term to the grid mapping. + # (Non-unity scale is not supported for Mercator/Stereographic). 
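# A minimal sketch of the "missing grid-mapping" mimicry described above
# (illustrative CDL fragment only): with gridmapvar_name="grid_2" the
# grid-mapping variable is emitted under the new name, but the data-variable
# keeps its hard-wired grid_mapping = "grid" attribute, so the reference
# dangles and loading proceeds as if no grid-mapping were present.
dangling_gridmap_cdl = """
    double phenom(yco, xco) ;
        phenom:grid_mapping = "grid" ;
    int grid_2 ;
        grid_2:grid_mapping_name = "latitude_longitude";
        grid_2:earth_radius = 6.e6 ;
"""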
sfapo_name = hh.CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN g_string += f""" {g_varname}:{sfapo_name} = {mapping_scalefactor} ; """ # - # Add various minimal required properties for different grid mappings + # Add various additional (minimal) required properties for different + # grid mapping types. # # Those which require 'latitude of projection origin' @@ -390,7 +408,8 @@ def test_bad_gridmapping_nameproperty(self): self.check_result(result, cube_no_cs=True) def test_latlon_bad_gridmapping_varname(self): - # rename the grid-mapping variable so it is effectively 'missing'. + # rename the grid-mapping variable so it is effectively 'missing' + # (I.E. the var named in "data-variable:grid_mapping" does not exist). # Rules Triggered: # 001 : fc_default # 002 : fc_provides_coordinate_latitude @@ -421,6 +440,8 @@ def test_latlon_bad_latlon_unit(self): # coords-build: # x(lon) is regular latlon with coord-system # y(lat) is a dim-coord, but NO coord-system + # = "fc_provides_coordinate_latitude" does not trigger, because it is + # not a valid latitude coordinate. result = self.run_testcase(latitude_units="degrees") self.check_result(result, latitude_no_cs=True) @@ -449,9 +470,13 @@ def test_mapping_rotated(self): # # All non-latlon coordinate systems ... - # These all have projection-x/y coordinates with units of metres + # These all have projection-x/y coordinates with units of metres. # They all work the same way, except that Mercator/Stereographic have # parameter checking routines that can fail. + # NOTE: various mapping types *require* certain addtional properties + # - without which an error will occur during translation. + # - run_testcase/_make_testcase_cdl know how to provide these + # # Rules Triggered: # 001 : fc_default # 002 : fc_provides_grid_mapping_ From f2868da3d03bb07120588927b65ead353c49484a Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 2 Jun 2021 15:49:26 +0100 Subject: [PATCH 14/35] Tidy testcase creation a bit. --- .../load_cube/test__load_cube__activate.py | 26 +++++++++++-------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py index cd62c9b481..5cee07678e 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py @@ -98,7 +98,6 @@ def tearDownClass(cls): def _make_testcase_cdl( self, - cdl_path, latitude_units=None, gridmapvar_name=None, gridmapvar_mappropertyname=None, @@ -107,7 +106,7 @@ def _make_testcase_cdl( mapping_scalefactor=None, ): """ - Write a testcase example into a CDL file. + Create a CDL string for a testcase. This is the "master" routine for creating all our testcases. Kwarg options modify a simple default testcase with a latlon grid. @@ -259,21 +258,26 @@ def _make_testcase_cdl( print("File content:") print(cdl_string) print("------\n") - with open(cdl_path, "w") as f_out: - f_out.write(cdl_string) - return cdl_path + return cdl_string - def _load_cube_from_cdl(self, cdl_path, nc_path): + def _load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): """ Load the 'phenom' data variable in a CDL testcase, as a cube. - Using ncgen and the selected _load_cube call. + Using ncgen, CFReader and the _load_cube call. + Can use a genuine Pyke engine, or the actions mimic engine, + selected by `self.use_pyke`. """ - # Create reference netCDF file from reference CDL. 
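# The CDL -> netCDF -> cube round-trip here relies on the external "ncgen"
# utility.  A standalone sketch of the same round-trip, assuming only that
# "ncgen" is on PATH, and using the public iris.load_cube call in place of
# the internal _load_cube exercised by these tests:
import subprocess
import tempfile
from pathlib import Path

import iris

tiny_cdl = """
netcdf tiny {
dimensions:
    x = 2 ;
variables:
    double phenom(x) ;
        phenom:standard_name = "air_temperature" ;
        phenom:units = "K" ;
data:
    phenom = 1., 2. ;
}
"""
tmpdir = Path(tempfile.mkdtemp())
cdl_path = tmpdir / "tiny.cdl"
nc_path = tmpdir / "tiny.nc"
cdl_path.write_text(tiny_cdl)
# Convert the CDL text to an actual netCDF file, then load it as a cube.
subprocess.check_call(["ncgen", "-o", str(nc_path), str(cdl_path)])
cube = iris.load_cube(str(nc_path))
print(cube.standard_name)  # --> air_temperature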
+ # Write the CDL to a file. + with open(cdl_path, "w") as f_out: + f_out.write(cdl_string) + + # Create a netCDF file from the CDL file. command = "ncgen -o {} {}".format(nc_path, cdl_path) subprocess.check_call(command, shell=True) + # Simulate the inner part of the file reading process. cf = CFReader(nc_path) # Grab a data variable : FOR NOW, should be only 1 cf_var = list(cf.cf_group.data_variables.values())[0] @@ -290,15 +294,15 @@ def _load_cube_from_cdl(self, cdl_path, nc_path): def run_testcase(self, **testcase_kwargs): """ - Run a testcase with chosen optionsm returning a test cube. + Run a testcase with chosen options, returning a test cube. The kwargs apply to the '_make_testcase_cdl' method. """ cdl_path = str(self.temp_dirpath / "test.cdl") nc_path = cdl_path.replace(".cdl", ".nc") - self._make_testcase_cdl(cdl_path, **testcase_kwargs) - cube = self._load_cube_from_cdl(cdl_path, nc_path) + cdl_string = self._make_testcase_cdl(**testcase_kwargs) + cube = self._load_cube_from_cdl(cdl_string, cdl_path, nc_path) if self.debug: print("\nCube:") print(cube) From 19797925b92fab1ee5d410c8f334cfb29ba1bcea Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 2 Jun 2021 18:20:21 +0100 Subject: [PATCH 15/35] Tests for mapping-types mismatch between coords+grid-mapping; Test nonmonotonic dimcoord values; Test warnings (or not). --- .../load_cube/test__load_cube__activate.py | 452 +++++++++++++++--- 1 file changed, 380 insertions(+), 72 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py index 5cee07678e..4d40370e56 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py @@ -101,9 +101,14 @@ def _make_testcase_cdl( latitude_units=None, gridmapvar_name=None, gridmapvar_mappropertyname=None, - gridmapvar_missingradius=False, - mapping_name=None, + mapping_missingradius=False, + mapping_type_name=None, mapping_scalefactor=None, + yco_values=None, + xco_name=None, + yco_name=None, + xco_units=None, + yco_units=None, ): """ Create a CDL string for a testcase. @@ -123,52 +128,62 @@ def _make_testcase_cdl( # Likewise, an invalid (unrecognised) grid-mapping can be mimicked by # selecting an unkown 'grid_mapping_name' property, with # "gridmapvar_mappropertyname=xxx". - if mapping_name is None: + if mapping_type_name is None: # Default grid-mapping and coords are standard lat-lon. - mapping_name = hh.CF_GRID_MAPPING_LAT_LON - xco_name = hh.CF_VALUE_STD_NAME_LON - yco_name = hh.CF_VALUE_STD_NAME_LAT - xco_units = "degrees_east" + mapping_type_name = hh.CF_GRID_MAPPING_LAT_LON + xco_name_default = hh.CF_VALUE_STD_NAME_LON + yco_name_default = hh.CF_VALUE_STD_NAME_LAT + xco_units_default = "degrees_east" # Special kwarg overrides some of the values. if latitude_units is None: - yco_units = "degrees_north" + yco_units_default = "degrees_north" else: # Override the latitude units (to invalidate). - yco_units = latitude_units + yco_units_default = latitude_units - elif mapping_name == hh.CF_GRID_MAPPING_ROTATED_LAT_LON: + elif mapping_type_name == hh.CF_GRID_MAPPING_ROTATED_LAT_LON: # Rotated lat-lon coordinates. 
- xco_name = hh.CF_VALUE_STD_NAME_GRID_LON - yco_name = hh.CF_VALUE_STD_NAME_GRID_LAT - xco_units = "degrees" - yco_units = "degrees" + xco_name_default = hh.CF_VALUE_STD_NAME_GRID_LON + yco_name_default = hh.CF_VALUE_STD_NAME_GRID_LAT + xco_units_default = "degrees" + yco_units_default = "degrees" else: # General non-latlon coordinates # Exactly which depends on the grid_mapping name. - xco_name = hh.CF_VALUE_STD_NAME_PROJ_X - yco_name = hh.CF_VALUE_STD_NAME_PROJ_Y - xco_units = "m" - yco_units = "m" + xco_name_default = hh.CF_VALUE_STD_NAME_PROJ_X + yco_name_default = hh.CF_VALUE_STD_NAME_PROJ_Y + xco_units_default = "m" + yco_units_default = "m" + + # Options can override coord (standard) names and units. + if xco_name is None: + xco_name = xco_name_default + if yco_name is None: + yco_name = yco_name_default + if xco_units is None: + xco_units = xco_units_default + if yco_units is None: + yco_units = yco_units_default grid_mapping_name = "grid" - # Options can override the gridvar name, and its 'grid+mapping_name' - # property. + # Options can override the gridvar name and properties. g_varname = gridmapvar_name g_mapname = gridmapvar_mappropertyname if g_varname is None: g_varname = grid_mapping_name if g_mapname is None: + # If you change this, it is no longer a valid grid-mapping var. g_mapname = "grid_mapping_name" # Omit the earth radius, if requested. - if gridmapvar_missingradius: + if mapping_missingradius: g_radius_string = "" else: g_radius_string = f"{g_varname}:earth_radius = 6.e6 ;" g_string = f""" int {g_varname} ; - {g_varname}:{g_mapname} = "{mapping_name}"; + {g_varname}:{g_mapname} = "{mapping_type_name}"; {g_radius_string} """ @@ -187,7 +202,7 @@ def _make_testcase_cdl( # # Those which require 'latitude of projection origin' - if mapping_name in ( + if mapping_type_name in ( hh.CF_GRID_MAPPING_TRANSVERSE, hh.CF_GRID_MAPPING_STEREO, hh.CF_GRID_MAPPING_GEOSTATIONARY, @@ -198,7 +213,7 @@ def _make_testcase_cdl( {g_varname}:{latpo_name} = 0.0 ; """ # Those which require 'longitude of projection origin' - if mapping_name in ( + if mapping_type_name in ( hh.CF_GRID_MAPPING_STEREO, hh.CF_GRID_MAPPING_GEOSTATIONARY, hh.CF_GRID_MAPPING_VERTICAL, @@ -208,13 +223,13 @@ def _make_testcase_cdl( {g_varname}:{lonpo_name} = 0.0 ; """ # Those which require 'longitude of central meridian' - if mapping_name in (hh.CF_GRID_MAPPING_TRANSVERSE,): + if mapping_type_name in (hh.CF_GRID_MAPPING_TRANSVERSE,): latcm_name = hh.CF_ATTR_GRID_LON_OF_CENT_MERIDIAN g_string += f""" {g_varname}:{latcm_name} = 0.0 ; """ # Those which require 'perspective point height' - if mapping_name in ( + if mapping_type_name in ( hh.CF_GRID_MAPPING_VERTICAL, hh.CF_GRID_MAPPING_GEOSTATIONARY, ): @@ -223,12 +238,18 @@ def _make_testcase_cdl( {g_varname}:{pph_name} = 600000.0 ; """ # Those which require 'sweep angle axis' - if mapping_name in (hh.CF_GRID_MAPPING_GEOSTATIONARY,): + if mapping_type_name in (hh.CF_GRID_MAPPING_GEOSTATIONARY,): saa_name = hh.CF_ATTR_GRID_SWEEP_ANGLE_AXIS g_string += f""" {g_varname}:{saa_name} = "y" ; """ + # y-coord values + if yco_values is None: + yco_values = [10.0, 20.0] + yco_value_strings = [str(val) for val in yco_values] + yco_values_string = ", ".join(yco_value_strings) + # Construct the total CDL string cdl_string = f""" netcdf test {{ @@ -250,7 +271,7 @@ def _make_testcase_cdl( xco:standard_name = "{xco_name}" ; {g_string} data: - yco = 10., 20. ; + yco = {yco_values_string} ; xco = 100., 110., 120. 
; }} """ @@ -292,7 +313,7 @@ def _load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): # iris.fileformats.netcdf.LOAD_PYKE = False return _load_cube(engine, cf, cf_var, nc_path) - def run_testcase(self, **testcase_kwargs): + def run_testcase(self, warning=None, **testcase_kwargs): """ Run a testcase with chosen options, returning a test cube. @@ -302,7 +323,12 @@ def run_testcase(self, **testcase_kwargs): cdl_path = str(self.temp_dirpath / "test.cdl") nc_path = cdl_path.replace(".cdl", ".nc") cdl_string = self._make_testcase_cdl(**testcase_kwargs) - cube = self._load_cube_from_cdl(cdl_string, cdl_path, nc_path) + if warning is None: + context = self.assertNoWarningsRegexp() + else: + context = self.assertWarnsRegexp(warning) + with context: + cube = self._load_cube_from_cdl(cdl_string, cdl_path, nc_path) if self.debug: print("\nCube:") print(cube) @@ -315,7 +341,11 @@ def check_result( cube_cstype=None, cube_no_cs=False, cube_no_xycoords=False, - latitude_no_cs=False, + xco_no_cs=False, # N.B. no effect if cube_no_cs is True + yco_no_cs=False, # N.B. no effect if cube_no_cs is True + yco_is_aux=False, + xco_stdname=True, + yco_stdname=True, ): """ Check key properties of a result cube. @@ -325,13 +355,18 @@ def check_result( self.assertEqual(cube.standard_name, "air_temperature") self.assertEqual(cube.var_name, "phenom") - x_coords = cube.coords(axis="x") - y_coords = cube.coords(axis="y") - expected_dim_coords = x_coords + y_coords + x_coords = cube.coords(dimensions=(1,)) + y_coords = cube.coords(dimensions=(0,)) + if yco_is_aux: + expected_dim_coords = x_coords + expected_aux_coords = y_coords + else: + expected_dim_coords = x_coords + y_coords + expected_aux_coords = [] + self.assertEqual( set(expected_dim_coords), set(cube.coords(dim_coords=True)) ) - # These are exactly the coords we have. if cube_no_xycoords: self.assertEqual(expected_dim_coords, []) x_coord = None @@ -342,31 +377,54 @@ def check_result( self.assertEqual(len(y_coords), 1) (y_coord,) = y_coords - expected_aux_coords = [] - # These are exactly the coords we have. 
self.assertEqual( set(expected_aux_coords), set(cube.coords(dim_coords=False)) ) + if x_coord: + if xco_stdname is None: + # no check + pass + elif xco_stdname is True: + self.assertIsNotNone(x_coord.standard_name) + elif xco_stdname is False: + self.assertIsNone(x_coord.standard_name) + else: + self.assertEqual(x_coord.standard_name, xco_stdname) + + if y_coord: + if yco_stdname is None: + # no check + pass + if yco_stdname is True: + self.assertIsNotNone(y_coord.standard_name) + elif yco_stdname is False: + self.assertIsNone(y_coord.standard_name) + else: + self.assertEqual(y_coord.standard_name, yco_stdname) + cube_cs = cube.coord_system() if cube_no_xycoords: - lat_cs = None - lon_cs = None + yco_cs = None + xco_cs = None else: - lat_cs = y_coord.coord_system - lon_cs = x_coord.coord_system + yco_cs = y_coord.coord_system + xco_cs = x_coord.coord_system if cube_no_cs: self.assertIsNone(cube_cs) - self.assertIsNone(lat_cs) - self.assertIsNone(lon_cs) + self.assertIsNone(yco_cs) + self.assertIsNone(xco_cs) else: if cube_cstype is not None: self.assertIsInstance(cube_cs, cube_cstype) - self.assertEqual(lon_cs, cube_cs) - if latitude_no_cs: - self.assertIsNone(lat_cs) + if xco_no_cs: + self.assertIsNone(xco_cs) + else: + self.assertEqual(xco_cs, cube_cs) + if yco_no_cs: + self.assertIsNone(yco_cs) else: - self.assertEqual(lat_cs, cube_cs) + self.assertEqual(yco_cs, cube_cs) class Mixin__grid_mapping(Mixin_Test__nc_load_actions): @@ -394,7 +452,7 @@ def test_missing_latlon_radius(self): # N.B. doesn't really test rules-activation, but maybe worth doing. # (no rules trigger) with self.assertRaisesRegex(ValueError, "No ellipsoid"): - self.run_testcase(gridmapvar_missingradius=True) + self.run_testcase(mapping_missingradius=True) def test_bad_gridmapping_nameproperty(self): # Fix the 'grid' var so it does not register as a grid-mapping. @@ -423,8 +481,8 @@ def test_latlon_bad_gridmapping_varname(self): # Notes: # no coord-system # all the same as test_bad_gridmapping_nameproperty - with self.assertWarnsRegexp("Missing.*grid mapping variable 'grid'"): - result = self.run_testcase(gridmapvar_name="grid_2") + warning = "Missing.*grid mapping variable 'grid'" + result = self.run_testcase(warning=warning, gridmapvar_name="grid_2") self.check_result(result, cube_no_cs=True) def test_latlon_bad_latlon_unit(self): @@ -447,7 +505,7 @@ def test_latlon_bad_latlon_unit(self): # = "fc_provides_coordinate_latitude" does not trigger, because it is # not a valid latitude coordinate. 
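# The warning= hook used by run_testcase relies on the test-class helpers
# assertWarnsRegexp / assertNoWarningsRegexp.  In plain unittest terms, the
# "expect a warning" half is roughly equivalent to the following sketch
# (the warning text is a stand-in chosen to match the regexp, not the exact
# message emitted by the loader):
import unittest
import warnings


class ExampleWarningCheck(unittest.TestCase):
    def test_missing_gridmap_warning(self):
        with self.assertWarnsRegex(UserWarning, "Missing.*grid mapping"):
            # Stand-in for the loader call which emits the warning.
            warnings.warn("Missing grid mapping variable 'grid'")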
result = self.run_testcase(latitude_units="degrees") - self.check_result(result, latitude_no_cs=True) + self.check_result(result, yco_no_cs=True) def test_mapping_rotated(self): # Test with rotated-latlon grid-mapping @@ -468,7 +526,7 @@ def test_mapping_rotated(self): # coords-build: lat+lon coords ROTATED, with coord-system # (rotated means different name + units) result = self.run_testcase( - mapping_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON + mapping_type_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON ) self.check_result(result, cube_cstype=ics.RotatedGeogCS) @@ -494,29 +552,31 @@ def test_mapping_rotated(self): # coords-build: proj-x/-y_, with coord-system def test_mapping_albers(self): - result = self.run_testcase(mapping_name=hh.CF_GRID_MAPPING_ALBERS) + result = self.run_testcase(mapping_type_name=hh.CF_GRID_MAPPING_ALBERS) self.check_result(result, cube_cstype=ics.AlbersEqualArea) def test_mapping_geostationary(self): result = self.run_testcase( - mapping_name=hh.CF_GRID_MAPPING_GEOSTATIONARY + mapping_type_name=hh.CF_GRID_MAPPING_GEOSTATIONARY ) self.check_result(result, cube_cstype=ics.Geostationary) def test_mapping_lambert_azimuthal(self): result = self.run_testcase( - mapping_name=hh.CF_GRID_MAPPING_LAMBERT_AZIMUTHAL + mapping_type_name=hh.CF_GRID_MAPPING_LAMBERT_AZIMUTHAL ) self.check_result(result, cube_cstype=ics.LambertAzimuthalEqualArea) def test_mapping_lambert_conformal(self): result = self.run_testcase( - mapping_name=hh.CF_GRID_MAPPING_LAMBERT_CONFORMAL + mapping_type_name=hh.CF_GRID_MAPPING_LAMBERT_CONFORMAL ) self.check_result(result, cube_cstype=ics.LambertConformal) def test_mapping_mercator(self): - result = self.run_testcase(mapping_name=hh.CF_GRID_MAPPING_MERCATOR) + result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_MERCATOR + ) self.check_result(result, cube_cstype=ics.Mercator) def test_mapping_mercator__fail_unsupported(self): @@ -530,16 +590,17 @@ def test_mapping_mercator__fail_unsupported(self): # coords-build: NONE # = NO coord-system # = NO dim-coords built (cube has no coords) - with self.assertWarnsRegexp("not yet supported for Mercator"): - # Set a non-unity scale factor, which mercator cannot handle. - result = self.run_testcase( - mapping_name=hh.CF_GRID_MAPPING_MERCATOR, - mapping_scalefactor=2.0, - ) + # Set a non-unity scale factor, which mercator cannot handle. + warning = "not yet supported for Mercator" + result = self.run_testcase( + warning=warning, + mapping_type_name=hh.CF_GRID_MAPPING_MERCATOR, + mapping_scalefactor=2.0, + ) self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) def test_mapping_stereographic(self): - result = self.run_testcase(mapping_name=hh.CF_GRID_MAPPING_STEREO) + result = self.run_testcase(mapping_type_name=hh.CF_GRID_MAPPING_STEREO) self.check_result(result, cube_cstype=ics.Stereographic) def test_mapping_stereographic__fail_unsupported(self): @@ -550,22 +611,239 @@ def test_mapping_stereographic__fail_unsupported(self): # Notes: # as for 'mercator__fail_unsupported', above # = NO dim-coords built (cube has no coords) - with self.assertWarnsRegexp("not yet supported for stereographic"): - # Set a non-unity scale factor, which stereo cannot handle. - result = self.run_testcase( - mapping_name=hh.CF_GRID_MAPPING_STEREO, - mapping_scalefactor=2.0, - ) + # + # Set a non-unity scale factor, which stereo cannot handle. 
+ warning = "not yet supported for stereographic" + result = self.run_testcase( + warning=warning, + mapping_type_name=hh.CF_GRID_MAPPING_STEREO, + mapping_scalefactor=2.0, + ) self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) def test_mapping_transverse_mercator(self): - result = self.run_testcase(mapping_name=hh.CF_GRID_MAPPING_TRANSVERSE) + result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_TRANSVERSE + ) self.check_result(result, cube_cstype=ics.TransverseMercator) def test_mapping_vertical_perspective(self): - result = self.run_testcase(mapping_name=hh.CF_GRID_MAPPING_VERTICAL) + result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_VERTICAL + ) self.check_result(result, cube_cstype=ics.VerticalPerspective) + def test_mapping_unsupported(self): + # Use azimuthal, which is a real thing but we don't yet support it. + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_projection_x_coordinate + # 003 : fc_provides_projection_y_coordinate + # NOTES: + # - there is no warning for this. + # TODO: perhaps there should be ? + result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_AZIMUTHAL + ) + self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) + + def test_mapping_undefined(self): + # Use a random, unknown "mapping type". + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_projection_x_coordinate + # 003 : fc_provides_projection_y_coordinate + # NOTES: + # - there is no warning for this. + # TODO: perhaps there should be ? + result = self.run_testcase(mapping_type_name="unknown") + self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) + + # + # Cases where names(+units) of coords don't match the grid-mapping type + # Effectively, there are 9 possibilities for (latlon/rotated/projected) + # coords against (latlon/rotated/projected/missing) coord-systems. + # N.B. the results are not all the same ... + # + + def test_mapping__mismatch__latlon_coords_rotated_system(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_rotated_latitude_longitude + # 003 : fc_provides_coordinate_latitude + # 004 : fc_provides_coordinate_longitude + # NOTES: + # no build_coord triggers, as it requires the correct mapping type + # so no dim-coords at all in this case + result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON, + xco_name="longitude", + xco_units="degrees_east", + yco_name="latitude", + yco_units="degrees_north", + ) + self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) + + def test_mapping__mismatch__latlon_coords_nonll_system(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_albers_equal_area + # 003 : fc_provides_coordinate_latitude + # 004 : fc_provides_coordinate_longitude + # 005 : fc_build_coordinate_latitude_nocs + # 006 : fc_build_coordinate_longitude_nocs + # NOTES: + # build_coord_XXX_cs triggers, requires NO latlon/rotated mapping + # - but a non-ll mapping is 'ok'. + # TODO: not really clear why this is right ? 
+ result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_ALBERS, + xco_name="longitude", + xco_units="degrees_east", + yco_name="latitude", + yco_units="degrees_north", + ) + self.check_result(result, cube_no_cs=True) + + def test_mapping__mismatch__latlon_coords_missing_system(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_coordinate_latitude + # 003 : fc_provides_coordinate_longitude + # 004 : fc_build_coordinate_latitude_nocs + # 005 : fc_build_coordinate_longitude_nocs + # NOTES: + # same as nonll, except *NO* grid-mapping is detected, + # - which makes no practical difference + warning = "Missing.*grid mapping variable 'grid'" + result = self.run_testcase( + warning=warning, + gridmapvar_name="moved", + xco_name="longitude", + xco_units="degrees_east", + yco_name="latitude", + yco_units="degrees_north", + ) + self.check_result(result, cube_no_cs=True) + + def test_mapping__mismatch__rotated_coords_latlon_system(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_latitude_longitude + # 003 : fc_provides_coordinate_latitude + # 004 : fc_provides_coordinate_longitude + # NOTES: + # no build_coord triggers : requires NO latlon/rotated mapping + # hence no coords at all + result = self.run_testcase( + xco_name="grid_longitude", + xco_units="degrees", + yco_name="grid_latitude", + yco_units="degrees", + ) + self.check_result(result, cube_no_cs=True, cube_no_xycoords=True) + + def test_mapping__mismatch__rotated_coords_nonll_system(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_albers_equal_area + # 003 : fc_provides_coordinate_latitude + # 004 : fc_provides_coordinate_longitude + # 005 : fc_build_coordinate_latitude_nocs + # 006 : fc_build_coordinate_longitude_nocs + # NOTES: + # this is different from the previous + # build_coord.._nocs triggers : requires NO latlon/rotated mapping + # - which seems odd + inconsistent (with previous) ? + # TODO: should this change ?? 
+ result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_ALBERS, + xco_name="grid_longitude", + xco_units="degrees", + yco_name="grid_latitude", + yco_units="degrees", + ) + self.check_result(result, cube_no_cs=True) + + def test_mapping__mismatch__rotated_coords_missing_system(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_coordinate_latitude + # 003 : fc_provides_coordinate_longitude + # 004 : fc_build_coordinate_latitude_nocs + # 005 : fc_build_coordinate_longitude_nocs + # NOTES: + # as previous, but no grid-mapping (which makes no difference) + warning = "Missing.*grid mapping variable 'grid'" + result = self.run_testcase( + warning=warning, + gridmapvar_name="moved", + xco_name="grid_longitude", + xco_units="degrees", + yco_name="grid_latitude", + yco_units="degrees", + ) + self.check_result(result, cube_no_cs=True) + + def test_mapping__mismatch__nonll_coords_latlon_system(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_latitude_longitude + # 003 : fc_default_coordinate + # 004 : fc_default_coordinate + # NOTES: + # dim-coords built as "defaults" : dim-coords, but NO standard name + result = self.run_testcase( + xco_name="projection_x", + xco_units="m", + yco_name="projection_y", + yco_units="m", + ) + self.check_result( + result, cube_no_cs=True, xco_stdname=False, yco_stdname=False + ) + + def test_mapping__mismatch__nonll_coords_rotated_system(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_rotated_latitude_longitude + # 003 : fc_default_coordinate + # 004 : fc_default_coordinate + # NOTES: + # same as previous __mismatch__nonll_ + result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON, + xco_name="projection_x", + xco_units="m", + yco_name="projection_y", + yco_units="m", + ) + self.check_result( + result, cube_no_cs=True, xco_stdname=False, yco_stdname=False + ) + + def test_mapping__mismatch__nonll_coords_missing_system(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_default_coordinate + # 003 : fc_default_coordinate + # NOTES: + # effectively, just like previous 2 __mismatch__nonll_ + warning = "Missing.*grid mapping variable 'grid'" + result = self.run_testcase( + warning=warning, + gridmapvar_name="moved", + xco_name="projection_x", + xco_units="m", + yco_name="projection_y", + yco_units="m", + ) + self.check_result( + result, cube_no_cs=True, xco_stdname=False, yco_stdname=False + ) + class Test__grid_mapping__pyke_rules(Mixin__grid_mapping, tests.IrisTest): # Run grid-mapping tests with Pyke (rules) @@ -597,5 +875,35 @@ def tearDownClass(cls): super().tearDownClass() +class Test__additional(Mixin_Test__nc_load_actions, tests.IrisTest): + # Run grid-mapping tests with non-Pyke (actions) + use_pyke = False + + @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + + def test_nondim_lats(self): + # Check what happens when values don't allow a coord to be dim-coord. + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_(latitude_longitude) + # 003 : fc_provides_coordinate_(latitude) + # 004 : fc_provides_coordinate_(longitude) + # 005 : fc_build_coordinate_(latitude) + # 006 : fc_build_coordinate_(longitude) + # NOTES: + # in terms of rule triggers, this is not distinct from a normal case + # - but the latitude is now an aux-coord. 
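# The fallback this testcase exercises can be seen with the coordinate
# classes directly (a minimal sketch, independent of the loading code):
# points that are not strictly monotonic are rejected by DimCoord but
# accepted by AuxCoord, which is why the loaded latitude ends up as an
# aux-coord.
import iris.coords

points = [0.0, 0.0]  # not monotonic, like the yco_values used here

try:
    iris.coords.DimCoord(points, standard_name="latitude", units="degrees_north")
except ValueError:
    # DimCoord requires strictly monotonic points, so this fails ...
    pass

# ... whereas an AuxCoord carries the same values without complaint.
aux_lat = iris.coords.AuxCoord(
    points, standard_name="latitude", units="degrees_north"
)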
+ warning = "must be.* monotonic" + result = self.run_testcase(warning=warning, yco_values=[0.0, 0.0]) + self.check_result(result, yco_is_aux=True) + + if __name__ == "__main__": tests.main() From 030b746864c0269b2227c48d722e13466c9a5d1c Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 2 Jun 2021 18:51:17 +0100 Subject: [PATCH 16/35] Refactor to put different test areas in their own sourcefiles. --- .../load_cube/load_cube__activate/__init__.py | 425 +++++++++++++++++ .../load_cube__activate/test__additional.py | 56 +++ .../test__grid_mappings.py} | 447 +----------------- 3 files changed, 486 insertions(+), 442 deletions(-) create mode 100644 lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py create mode 100644 lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__additional.py rename lib/iris/tests/unit/fileformats/netcdf/load_cube/{test__load_cube__activate.py => load_cube__activate/test__grid_mappings.py} (53%) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py new file mode 100644 index 0000000000..6d6c1a92f4 --- /dev/null +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py @@ -0,0 +1,425 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the engine.activate() call within the +`iris.fileformats.netcdf._load_cube` function. + +For now, these tests are designed to function with **either** the "old" +Pyke-rules implementation in :mod:`iris.fileformats._pyke_rules`, **or** the +"new" :mod:`iris.fileformats._nc_load_rules`. +Both of those supply an "engine" with an "activate" method + -- at least for now : may be simplified in future. + +""" +from pathlib import Path +import shutil +import subprocess +import tempfile + +from iris.fileformats.cf import CFReader +import iris.fileformats.netcdf +from iris.fileformats.netcdf import _load_cube +import iris.fileformats._nc_load_rules.engine +import iris.fileformats._nc_load_rules.helpers as hh + +""" +Notes on testing method. + +IN cf : "def _load_cube(engine, cf, cf_var, filename)" +WHERE: + - engine is a :class:`pyke.knowledge_engine.engine` + -- **OR** :class:`iris.fileformats._nc_load_rules.engine.Engine` + - cf is a CFReader + - cf_var is a CFDAtaVariable + +As it's hard to construct a suitable CFReader from scratch, it would seem +simpler (for now) to use an ACTUAL FILE. +Likewise, the easiest approach to that is with CDL and "ncgen". +To do this, we need a test "fixture" that can create suitable test files in a +temporary directory. + +""" + + +class Mixin__nc_load_actions: + """ + Class to make testcases for rules or actions code, and check results. + + Defines standard setUpClass/tearDownClass methods, to create a temporary + directory for intermediate files. + NOTE: owing to peculiarities of unittest, these must be explicitly called + from a setUpClass/tearDownClass within the 'final' inheritor, i.e. the + actual Test_XXX class which also inherits unittest.TestCase. + + Testcases are manufactured by the '_make_testcase_cdl' method. + These are based on a 'standard simple latlon grid' example. + Various kwargs control variations on this. 
+ + The 'run_testcase' method takes the '_make_testcase_cdl' kwargs and makes + a result cube (by: producing cdl, converting to netcdf, and loading). + + The 'check_result' method performs various checks on the result, with + kwargs controlling the expected properties to be tested against. + This usage is *also* based on the 'standard simple latlon grid' example, + the kwargs specify expected differences from that. + + Can also test with either the Pyke(rules) or non-Pyke (actions) + implementations (for now). + + """ + + # + # "global" test settings + # + + # whether to test 'rules' or 'actions' implementations + # TODO: remove when Pyke is gone + use_pyke = True + + # whether to output various debug info + # TODO: ?possibly? remove when development is complete + debug = False + + @classmethod + def setUpClass(cls): + # # Control which testing method we are applying. + # Create a temp directory for temp files. + cls.temp_dirpath = Path(tempfile.mkdtemp()) + + @classmethod + def tearDownClass(cls): + # Destroy a temp directory for temp files. + shutil.rmtree(cls.temp_dirpath) + + def _make_testcase_cdl( + self, + latitude_units=None, + gridmapvar_name=None, + gridmapvar_mappropertyname=None, + mapping_missingradius=False, + mapping_type_name=None, + mapping_scalefactor=None, + yco_values=None, + xco_name=None, + yco_name=None, + xco_units=None, + yco_units=None, + ): + """ + Create a CDL string for a testcase. + + This is the "master" routine for creating all our testcases. + Kwarg options modify a simple default testcase with a latlon grid. + The routine handles the various testcase options and their possible + interactions. This includes knowing what extra changes are required + to support different grid-mapping types (for example). + + """ + # The grid-mapping options are standard-latlon, rotated, or non-latlon. + # This affects names+units of the X and Y coords. + # We don't have an option to *not* include a grid-mapping variable, but + # we can mimic a missing grid-mapping by changing the varname from that + # which the data-variable refers to, with "gridmapvar_name=xxx". + # Likewise, an invalid (unrecognised) grid-mapping can be mimicked by + # selecting an unkown 'grid_mapping_name' property, with + # "gridmapvar_mappropertyname=xxx". + if mapping_type_name is None: + # Default grid-mapping and coords are standard lat-lon. + mapping_type_name = hh.CF_GRID_MAPPING_LAT_LON + xco_name_default = hh.CF_VALUE_STD_NAME_LON + yco_name_default = hh.CF_VALUE_STD_NAME_LAT + xco_units_default = "degrees_east" + # Special kwarg overrides some of the values. + if latitude_units is None: + yco_units_default = "degrees_north" + else: + # Override the latitude units (to invalidate). + yco_units_default = latitude_units + + elif mapping_type_name == hh.CF_GRID_MAPPING_ROTATED_LAT_LON: + # Rotated lat-lon coordinates. + xco_name_default = hh.CF_VALUE_STD_NAME_GRID_LON + yco_name_default = hh.CF_VALUE_STD_NAME_GRID_LAT + xco_units_default = "degrees" + yco_units_default = "degrees" + + else: + # General non-latlon coordinates + # Exactly which depends on the grid_mapping name. + xco_name_default = hh.CF_VALUE_STD_NAME_PROJ_X + yco_name_default = hh.CF_VALUE_STD_NAME_PROJ_Y + xco_units_default = "m" + yco_units_default = "m" + + # Options can override coord (standard) names and units. 
+ if xco_name is None: + xco_name = xco_name_default + if yco_name is None: + yco_name = yco_name_default + if xco_units is None: + xco_units = xco_units_default + if yco_units is None: + yco_units = yco_units_default + + grid_mapping_name = "grid" + # Options can override the gridvar name and properties. + g_varname = gridmapvar_name + g_mapname = gridmapvar_mappropertyname + if g_varname is None: + g_varname = grid_mapping_name + if g_mapname is None: + # If you change this, it is no longer a valid grid-mapping var. + g_mapname = "grid_mapping_name" + + # Omit the earth radius, if requested. + if mapping_missingradius: + g_radius_string = "" + else: + g_radius_string = f"{g_varname}:earth_radius = 6.e6 ;" + g_string = f""" + int {g_varname} ; + {g_varname}:{g_mapname} = "{mapping_type_name}"; + {g_radius_string} + """ + + # Add a specified scale-factor, if requested. + if mapping_scalefactor is not None: + # Add a specific scale-factor term to the grid mapping. + # (Non-unity scale is not supported for Mercator/Stereographic). + sfapo_name = hh.CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN + g_string += f""" + {g_varname}:{sfapo_name} = {mapping_scalefactor} ; + """ + + # + # Add various additional (minimal) required properties for different + # grid mapping types. + # + + # Those which require 'latitude of projection origin' + if mapping_type_name in ( + hh.CF_GRID_MAPPING_TRANSVERSE, + hh.CF_GRID_MAPPING_STEREO, + hh.CF_GRID_MAPPING_GEOSTATIONARY, + hh.CF_GRID_MAPPING_VERTICAL, + ): + latpo_name = hh.CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN + g_string += f""" + {g_varname}:{latpo_name} = 0.0 ; + """ + # Those which require 'longitude of projection origin' + if mapping_type_name in ( + hh.CF_GRID_MAPPING_STEREO, + hh.CF_GRID_MAPPING_GEOSTATIONARY, + hh.CF_GRID_MAPPING_VERTICAL, + ): + lonpo_name = hh.CF_ATTR_GRID_LON_OF_PROJ_ORIGIN + g_string += f""" + {g_varname}:{lonpo_name} = 0.0 ; + """ + # Those which require 'longitude of central meridian' + if mapping_type_name in (hh.CF_GRID_MAPPING_TRANSVERSE,): + latcm_name = hh.CF_ATTR_GRID_LON_OF_CENT_MERIDIAN + g_string += f""" + {g_varname}:{latcm_name} = 0.0 ; + """ + # Those which require 'perspective point height' + if mapping_type_name in ( + hh.CF_GRID_MAPPING_VERTICAL, + hh.CF_GRID_MAPPING_GEOSTATIONARY, + ): + pph_name = hh.CF_ATTR_GRID_PERSPECTIVE_HEIGHT + g_string += f""" + {g_varname}:{pph_name} = 600000.0 ; + """ + # Those which require 'sweep angle axis' + if mapping_type_name in (hh.CF_GRID_MAPPING_GEOSTATIONARY,): + saa_name = hh.CF_ATTR_GRID_SWEEP_ANGLE_AXIS + g_string += f""" + {g_varname}:{saa_name} = "y" ; + """ + + # y-coord values + if yco_values is None: + yco_values = [10.0, 20.0] + yco_value_strings = [str(val) for val in yco_values] + yco_values_string = ", ".join(yco_value_strings) + + # Construct the total CDL string + cdl_string = f""" + netcdf test {{ + dimensions: + yco = 2 ; + xco = 3 ; + variables: + double phenom(yco, xco) ; + phenom:standard_name = "air_temperature" ; + phenom:units = "K" ; + phenom:grid_mapping = "grid" ; + double yco(yco) ; + yco:axis = "Y" ; + yco:units = "{yco_units}" ; + yco:standard_name = "{yco_name}" ; + double xco(xco) ; + xco:axis = "X" ; + xco:units = "{xco_units}" ; + xco:standard_name = "{xco_name}" ; + {g_string} + data: + yco = {yco_values_string} ; + xco = 100., 110., 120. 
; + }} + """ + if self.debug: + print("File content:") + print(cdl_string) + print("------\n") + return cdl_string + + def _load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): + """ + Load the 'phenom' data variable in a CDL testcase, as a cube. + + Using ncgen, CFReader and the _load_cube call. + Can use a genuine Pyke engine, or the actions mimic engine, + selected by `self.use_pyke`. + + """ + # Write the CDL to a file. + with open(cdl_path, "w") as f_out: + f_out.write(cdl_string) + + # Create a netCDF file from the CDL file. + command = "ncgen -o {} {}".format(nc_path, cdl_path) + subprocess.check_call(command, shell=True) + + # Simulate the inner part of the file reading process. + cf = CFReader(nc_path) + # Grab a data variable : FOR NOW, should be only 1 + cf_var = list(cf.cf_group.data_variables.values())[0] + cf_var = cf.cf_group.data_variables["phenom"] + + if self.use_pyke: + engine = iris.fileformats.netcdf._pyke_kb_engine_real() + else: + engine = iris.fileformats._nc_load_rules.engine.Engine() + + iris.fileformats.netcdf.DEBUG = self.debug + # iris.fileformats.netcdf.LOAD_PYKE = False + return _load_cube(engine, cf, cf_var, nc_path) + + def run_testcase(self, warning=None, **testcase_kwargs): + """ + Run a testcase with chosen options, returning a test cube. + + The kwargs apply to the '_make_testcase_cdl' method. + + """ + cdl_path = str(self.temp_dirpath / "test.cdl") + nc_path = cdl_path.replace(".cdl", ".nc") + cdl_string = self._make_testcase_cdl(**testcase_kwargs) + if warning is None: + context = self.assertNoWarningsRegexp() + else: + context = self.assertWarnsRegexp(warning) + with context: + cube = self._load_cube_from_cdl(cdl_string, cdl_path, nc_path) + if self.debug: + print("\nCube:") + print(cube) + print("") + return cube + + def check_result( + self, + cube, + cube_cstype=None, + cube_no_cs=False, + cube_no_xycoords=False, + xco_no_cs=False, # N.B. no effect if cube_no_cs is True + yco_no_cs=False, # N.B. no effect if cube_no_cs is True + yco_is_aux=False, + xco_stdname=True, + yco_stdname=True, + ): + """ + Check key properties of a result cube. + + Various options control the expected things which are tested. 
+ """ + self.assertEqual(cube.standard_name, "air_temperature") + self.assertEqual(cube.var_name, "phenom") + + x_coords = cube.coords(dimensions=(1,)) + y_coords = cube.coords(dimensions=(0,)) + if yco_is_aux: + expected_dim_coords = x_coords + expected_aux_coords = y_coords + else: + expected_dim_coords = x_coords + y_coords + expected_aux_coords = [] + + self.assertEqual( + set(expected_dim_coords), set(cube.coords(dim_coords=True)) + ) + if cube_no_xycoords: + self.assertEqual(expected_dim_coords, []) + x_coord = None + y_coord = None + else: + self.assertEqual(len(x_coords), 1) + (x_coord,) = x_coords + self.assertEqual(len(y_coords), 1) + (y_coord,) = y_coords + + self.assertEqual( + set(expected_aux_coords), set(cube.coords(dim_coords=False)) + ) + + if x_coord: + if xco_stdname is None: + # no check + pass + elif xco_stdname is True: + self.assertIsNotNone(x_coord.standard_name) + elif xco_stdname is False: + self.assertIsNone(x_coord.standard_name) + else: + self.assertEqual(x_coord.standard_name, xco_stdname) + + if y_coord: + if yco_stdname is None: + # no check + pass + if yco_stdname is True: + self.assertIsNotNone(y_coord.standard_name) + elif yco_stdname is False: + self.assertIsNone(y_coord.standard_name) + else: + self.assertEqual(y_coord.standard_name, yco_stdname) + + cube_cs = cube.coord_system() + if cube_no_xycoords: + yco_cs = None + xco_cs = None + else: + yco_cs = y_coord.coord_system + xco_cs = x_coord.coord_system + if cube_no_cs: + self.assertIsNone(cube_cs) + self.assertIsNone(yco_cs) + self.assertIsNone(xco_cs) + else: + if cube_cstype is not None: + self.assertIsInstance(cube_cs, cube_cstype) + if xco_no_cs: + self.assertIsNone(xco_cs) + else: + self.assertEqual(xco_cs, cube_cs) + if yco_no_cs: + self.assertIsNone(yco_cs) + else: + self.assertEqual(yco_cs, cube_cs) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__additional.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__additional.py new file mode 100644 index 0000000000..0678e3b307 --- /dev/null +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__additional.py @@ -0,0 +1,56 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the engine.activate() call within the +`iris.fileformats.netcdf._load_cube` function. + +For now, these tests are designed to function with **either** the "old" +Pyke-rules implementation in :mod:`iris.fileformats._pyke_rules`, **or** the +"new" :mod:`iris.fileformats._nc_load_rules`. +Both of those supply an "engine" with an "activate" method + -- at least for now : may be simplified in future. + +""" +import iris.tests as tests + + +from iris.tests.unit.fileformats.netcdf.load_cube.load_cube__activate import ( + Mixin__nc_load_actions, +) + + +class Test__additional(Mixin__nc_load_actions, tests.IrisTest): + # Run grid-mapping tests with non-Pyke (actions) + use_pyke = False + + @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + + def test_nondim_lats(self): + # Check what happens when values don't allow a coord to be dim-coord. 
+ # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_(latitude_longitude) + # 003 : fc_provides_coordinate_(latitude) + # 004 : fc_provides_coordinate_(longitude) + # 005 : fc_build_coordinate_(latitude) + # 006 : fc_build_coordinate_(longitude) + # NOTES: + # in terms of rule triggers, this is not distinct from a normal case + # - but the latitude is now an aux-coord. + warning = "must be.* monotonic" + result = self.run_testcase(warning=warning, yco_values=[0.0, 0.0]) + self.check_result(result, yco_is_aux=True) + + +if __name__ == "__main__": + tests.main() diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py similarity index 53% rename from lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py rename to lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py index 4d40370e56..ac495ac910 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/test__load_cube__activate.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py @@ -7,427 +7,20 @@ Unit tests for the engine.activate() call within the `iris.fileformats.netcdf._load_cube` function. -For now, these tests are designed to function with **either** the "old" -Pyke-rules implementation in :mod:`iris.fileformats._pyke_rules`, **or** the -"new" :mod:`iris.fileformats._nc_load_rules`. -Both of those supply an "activate" call (for now : may be simplified in future). +Here, *specifically* testcases relating to grid-mappings and dim-coords. """ import iris.tests as tests -from pathlib import Path -import shutil -import subprocess -import tempfile - import iris.coord_systems as ics -from iris.fileformats.cf import CFReader -import iris.fileformats.netcdf -from iris.fileformats.netcdf import _load_cube -import iris.fileformats._nc_load_rules.engine import iris.fileformats._nc_load_rules.helpers as hh -""" -Notes on testing method. - -IN cf : "def _load_cube(engine, cf, cf_var, filename)" -WHERE: - - engine is a :class:`pyke.knowledge_engine.engine` - -- **OR** :class:`iris.fileformats._nc_load_rules.engine.Engine` - - cf is a CFReader - - cf_var is a CFDAtaVariable - -As it's hard to construct a suitable CFReader from scratch, it would seem -simpler (for now) to use an ACTUAL FILE. -Likewise, the easiest approach to that is with CDL and "ncgen". -To do this, we need a test "fixture" that can create suitable test files in a -temporary directory. - -""" - - -class Mixin_Test__nc_load_actions: - """ - Class to make testcases for rules or actions code, and check results. - - Defines standard setUpClass/tearDownClass methods, to create a temporary - directory for intermediate files. - NOTE: owing to peculiarities of unittest, these must be explicitly called - from a setUpClass/tearDownClass within the 'final' inheritor, i.e. the - actual Test_XXX class which also inherits unittest.TestCase. - - Testcases are manufactured by the '_make_testcase_cdl' method. - These are based on a 'standard simple latlon grid' example. - Various kwargs control variations on this. - - The 'run_testcase' method takes the '_make_testcase_cdl' kwargs and makes - a result cube (by: producing cdl, converting to netcdf, and loading). - - The 'check_result' method performs various checks on the result, with - kwargs controlling the expected properties to be tested against. 
- This usage is *also* based on the 'standard simple latlon grid' example, - the kwargs specify expected differences from that. - - Can also test with either the Pyke(rules) or non-Pyke (actions) - implementations (for now). - - """ - - # - # "global" test settings - # - - # whether to test 'rules' or 'actions' implementations - # TODO: remove when Pyke is gone - use_pyke = True - - # whether to output various debug info - # TODO: ?possibly? remove when development is complete - debug = False - - @classmethod - def setUpClass(cls): - # # Control which testing method we are applying. - # Create a temp directory for temp files. - cls.temp_dirpath = Path(tempfile.mkdtemp()) - - @classmethod - def tearDownClass(cls): - # Destroy a temp directory for temp files. - shutil.rmtree(cls.temp_dirpath) - - def _make_testcase_cdl( - self, - latitude_units=None, - gridmapvar_name=None, - gridmapvar_mappropertyname=None, - mapping_missingradius=False, - mapping_type_name=None, - mapping_scalefactor=None, - yco_values=None, - xco_name=None, - yco_name=None, - xco_units=None, - yco_units=None, - ): - """ - Create a CDL string for a testcase. - - This is the "master" routine for creating all our testcases. - Kwarg options modify a simple default testcase with a latlon grid. - The routine handles the various testcase options and their possible - interactions. This includes knowing what extra changes are required - to support different grid-mapping types (for example). - - """ - # The grid-mapping options are standard-latlon, rotated, or non-latlon. - # This affects names+units of the X and Y coords. - # We don't have an option to *not* include a grid-mapping variable, but - # we can mimic a missing grid-mapping by changing the varname from that - # which the data-variable refers to, with "gridmapvar_name=xxx". - # Likewise, an invalid (unrecognised) grid-mapping can be mimicked by - # selecting an unkown 'grid_mapping_name' property, with - # "gridmapvar_mappropertyname=xxx". - if mapping_type_name is None: - # Default grid-mapping and coords are standard lat-lon. - mapping_type_name = hh.CF_GRID_MAPPING_LAT_LON - xco_name_default = hh.CF_VALUE_STD_NAME_LON - yco_name_default = hh.CF_VALUE_STD_NAME_LAT - xco_units_default = "degrees_east" - # Special kwarg overrides some of the values. - if latitude_units is None: - yco_units_default = "degrees_north" - else: - # Override the latitude units (to invalidate). - yco_units_default = latitude_units - - elif mapping_type_name == hh.CF_GRID_MAPPING_ROTATED_LAT_LON: - # Rotated lat-lon coordinates. - xco_name_default = hh.CF_VALUE_STD_NAME_GRID_LON - yco_name_default = hh.CF_VALUE_STD_NAME_GRID_LAT - xco_units_default = "degrees" - yco_units_default = "degrees" - - else: - # General non-latlon coordinates - # Exactly which depends on the grid_mapping name. - xco_name_default = hh.CF_VALUE_STD_NAME_PROJ_X - yco_name_default = hh.CF_VALUE_STD_NAME_PROJ_Y - xco_units_default = "m" - yco_units_default = "m" - - # Options can override coord (standard) names and units. - if xco_name is None: - xco_name = xco_name_default - if yco_name is None: - yco_name = yco_name_default - if xco_units is None: - xco_units = xco_units_default - if yco_units is None: - yco_units = yco_units_default - - grid_mapping_name = "grid" - # Options can override the gridvar name and properties. 
- g_varname = gridmapvar_name - g_mapname = gridmapvar_mappropertyname - if g_varname is None: - g_varname = grid_mapping_name - if g_mapname is None: - # If you change this, it is no longer a valid grid-mapping var. - g_mapname = "grid_mapping_name" - - # Omit the earth radius, if requested. - if mapping_missingradius: - g_radius_string = "" - else: - g_radius_string = f"{g_varname}:earth_radius = 6.e6 ;" - g_string = f""" - int {g_varname} ; - {g_varname}:{g_mapname} = "{mapping_type_name}"; - {g_radius_string} - """ - - # Add a specified scale-factor, if requested. - if mapping_scalefactor is not None: - # Add a specific scale-factor term to the grid mapping. - # (Non-unity scale is not supported for Mercator/Stereographic). - sfapo_name = hh.CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN - g_string += f""" - {g_varname}:{sfapo_name} = {mapping_scalefactor} ; - """ - - # - # Add various additional (minimal) required properties for different - # grid mapping types. - # +from iris.tests.unit.fileformats.netcdf.load_cube.load_cube__activate import ( + Mixin__nc_load_actions, +) - # Those which require 'latitude of projection origin' - if mapping_type_name in ( - hh.CF_GRID_MAPPING_TRANSVERSE, - hh.CF_GRID_MAPPING_STEREO, - hh.CF_GRID_MAPPING_GEOSTATIONARY, - hh.CF_GRID_MAPPING_VERTICAL, - ): - latpo_name = hh.CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN - g_string += f""" - {g_varname}:{latpo_name} = 0.0 ; - """ - # Those which require 'longitude of projection origin' - if mapping_type_name in ( - hh.CF_GRID_MAPPING_STEREO, - hh.CF_GRID_MAPPING_GEOSTATIONARY, - hh.CF_GRID_MAPPING_VERTICAL, - ): - lonpo_name = hh.CF_ATTR_GRID_LON_OF_PROJ_ORIGIN - g_string += f""" - {g_varname}:{lonpo_name} = 0.0 ; - """ - # Those which require 'longitude of central meridian' - if mapping_type_name in (hh.CF_GRID_MAPPING_TRANSVERSE,): - latcm_name = hh.CF_ATTR_GRID_LON_OF_CENT_MERIDIAN - g_string += f""" - {g_varname}:{latcm_name} = 0.0 ; - """ - # Those which require 'perspective point height' - if mapping_type_name in ( - hh.CF_GRID_MAPPING_VERTICAL, - hh.CF_GRID_MAPPING_GEOSTATIONARY, - ): - pph_name = hh.CF_ATTR_GRID_PERSPECTIVE_HEIGHT - g_string += f""" - {g_varname}:{pph_name} = 600000.0 ; - """ - # Those which require 'sweep angle axis' - if mapping_type_name in (hh.CF_GRID_MAPPING_GEOSTATIONARY,): - saa_name = hh.CF_ATTR_GRID_SWEEP_ANGLE_AXIS - g_string += f""" - {g_varname}:{saa_name} = "y" ; - """ - - # y-coord values - if yco_values is None: - yco_values = [10.0, 20.0] - yco_value_strings = [str(val) for val in yco_values] - yco_values_string = ", ".join(yco_value_strings) - - # Construct the total CDL string - cdl_string = f""" - netcdf test {{ - dimensions: - yco = 2 ; - xco = 3 ; - variables: - double phenom(yco, xco) ; - phenom:standard_name = "air_temperature" ; - phenom:units = "K" ; - phenom:grid_mapping = "grid" ; - double yco(yco) ; - yco:axis = "Y" ; - yco:units = "{yco_units}" ; - yco:standard_name = "{yco_name}" ; - double xco(xco) ; - xco:axis = "X" ; - xco:units = "{xco_units}" ; - xco:standard_name = "{xco_name}" ; - {g_string} - data: - yco = {yco_values_string} ; - xco = 100., 110., 120. ; - }} - """ - if self.debug: - print("File content:") - print(cdl_string) - print("------\n") - return cdl_string - - def _load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): - """ - Load the 'phenom' data variable in a CDL testcase, as a cube. - - Using ncgen, CFReader and the _load_cube call. - Can use a genuine Pyke engine, or the actions mimic engine, - selected by `self.use_pyke`. 
- - """ - # Write the CDL to a file. - with open(cdl_path, "w") as f_out: - f_out.write(cdl_string) - - # Create a netCDF file from the CDL file. - command = "ncgen -o {} {}".format(nc_path, cdl_path) - subprocess.check_call(command, shell=True) - - # Simulate the inner part of the file reading process. - cf = CFReader(nc_path) - # Grab a data variable : FOR NOW, should be only 1 - cf_var = list(cf.cf_group.data_variables.values())[0] - cf_var = cf.cf_group.data_variables["phenom"] - - if self.use_pyke: - engine = iris.fileformats.netcdf._pyke_kb_engine_real() - else: - engine = iris.fileformats._nc_load_rules.engine.Engine() - - iris.fileformats.netcdf.DEBUG = self.debug - # iris.fileformats.netcdf.LOAD_PYKE = False - return _load_cube(engine, cf, cf_var, nc_path) - - def run_testcase(self, warning=None, **testcase_kwargs): - """ - Run a testcase with chosen options, returning a test cube. - - The kwargs apply to the '_make_testcase_cdl' method. - - """ - cdl_path = str(self.temp_dirpath / "test.cdl") - nc_path = cdl_path.replace(".cdl", ".nc") - cdl_string = self._make_testcase_cdl(**testcase_kwargs) - if warning is None: - context = self.assertNoWarningsRegexp() - else: - context = self.assertWarnsRegexp(warning) - with context: - cube = self._load_cube_from_cdl(cdl_string, cdl_path, nc_path) - if self.debug: - print("\nCube:") - print(cube) - print("") - return cube - - def check_result( - self, - cube, - cube_cstype=None, - cube_no_cs=False, - cube_no_xycoords=False, - xco_no_cs=False, # N.B. no effect if cube_no_cs is True - yco_no_cs=False, # N.B. no effect if cube_no_cs is True - yco_is_aux=False, - xco_stdname=True, - yco_stdname=True, - ): - """ - Check key properties of a result cube. - - Various options control the expected things which are tested. 
- """ - self.assertEqual(cube.standard_name, "air_temperature") - self.assertEqual(cube.var_name, "phenom") - - x_coords = cube.coords(dimensions=(1,)) - y_coords = cube.coords(dimensions=(0,)) - if yco_is_aux: - expected_dim_coords = x_coords - expected_aux_coords = y_coords - else: - expected_dim_coords = x_coords + y_coords - expected_aux_coords = [] - - self.assertEqual( - set(expected_dim_coords), set(cube.coords(dim_coords=True)) - ) - if cube_no_xycoords: - self.assertEqual(expected_dim_coords, []) - x_coord = None - y_coord = None - else: - self.assertEqual(len(x_coords), 1) - (x_coord,) = x_coords - self.assertEqual(len(y_coords), 1) - (y_coord,) = y_coords - - self.assertEqual( - set(expected_aux_coords), set(cube.coords(dim_coords=False)) - ) - if x_coord: - if xco_stdname is None: - # no check - pass - elif xco_stdname is True: - self.assertIsNotNone(x_coord.standard_name) - elif xco_stdname is False: - self.assertIsNone(x_coord.standard_name) - else: - self.assertEqual(x_coord.standard_name, xco_stdname) - - if y_coord: - if yco_stdname is None: - # no check - pass - if yco_stdname is True: - self.assertIsNotNone(y_coord.standard_name) - elif yco_stdname is False: - self.assertIsNone(y_coord.standard_name) - else: - self.assertEqual(y_coord.standard_name, yco_stdname) - - cube_cs = cube.coord_system() - if cube_no_xycoords: - yco_cs = None - xco_cs = None - else: - yco_cs = y_coord.coord_system - xco_cs = x_coord.coord_system - if cube_no_cs: - self.assertIsNone(cube_cs) - self.assertIsNone(yco_cs) - self.assertIsNone(xco_cs) - else: - if cube_cstype is not None: - self.assertIsInstance(cube_cs, cube_cstype) - if xco_no_cs: - self.assertIsNone(xco_cs) - else: - self.assertEqual(xco_cs, cube_cs) - if yco_no_cs: - self.assertIsNone(yco_cs) - else: - self.assertEqual(yco_cs, cube_cs) - - -class Mixin__grid_mapping(Mixin_Test__nc_load_actions): +class Mixin__grid_mapping(Mixin__nc_load_actions): # Various testcases for translation of grid-mappings def test_basic_latlon(self): @@ -875,35 +468,5 @@ def tearDownClass(cls): super().tearDownClass() -class Test__additional(Mixin_Test__nc_load_actions, tests.IrisTest): - # Run grid-mapping tests with non-Pyke (actions) - use_pyke = False - - @classmethod - def setUpClass(cls): - super().setUpClass() - - @classmethod - def tearDownClass(cls): - super().tearDownClass() - - def test_nondim_lats(self): - # Check what happens when values don't allow a coord to be dim-coord. - # - # Rules Triggered: - # 001 : fc_default - # 002 : fc_provides_grid_mapping_(latitude_longitude) - # 003 : fc_provides_coordinate_(latitude) - # 004 : fc_provides_coordinate_(longitude) - # 005 : fc_build_coordinate_(latitude) - # 006 : fc_build_coordinate_(longitude) - # NOTES: - # in terms of rule triggers, this is not distinct from a normal case - # - but the latitude is now an aux-coord. - warning = "must be.* monotonic" - result = self.run_testcase(warning=warning, yco_values=[0.0, 0.0]) - self.check_result(result, yco_is_aux=True) - - if __name__ == "__main__": tests.main() From 6a3cad02d04edbfc394d293dc85e63f57f8c7f16 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Sun, 6 Jun 2021 23:18:35 +0100 Subject: [PATCH 17/35] Small fix to actions code. 
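
This fixes action_build_auxiliary_coordinate, which previously passed
coord_name=hh.CF_VALUE_STD_NAME_GRID_LON to the builder for *every*
identified coordinate type.  The coord_name is now chosen to match the
identified type, and left as None for time, time-period and unidentified
coordinates.

A minimal illustrative sketch of the intended selection logic follows (it is
not part of the patch itself).  Only the hh.* constant names appear in the
diff below; the literal standard-name values used here are assumptions:

# Sketch only: how the fixed action is expected to choose the coord_name
# that it passes to hh.build_auxiliary_coordinate().
CF_VALUE_STD_NAME_LON = "longitude"  # assumed value of hh.CF_VALUE_STD_NAME_LON
CF_VALUE_STD_NAME_LAT = "latitude"  # assumed value of hh.CF_VALUE_STD_NAME_LAT
CF_VALUE_STD_NAME_GRID_LON = "grid_longitude"  # assumed hh.CF_VALUE_STD_NAME_GRID_LON
CF_VALUE_STD_NAME_GRID_LAT = "grid_latitude"  # assumed hh.CF_VALUE_STD_NAME_GRID_LAT


def pick_coord_name(is_lon, is_lat, is_rotated):
    # Longitude-like and latitude-like coords get a specific standard name;
    # anything else (time, time-period, unidentified) gets None.
    if is_lon:
        return CF_VALUE_STD_NAME_GRID_LON if is_rotated else CF_VALUE_STD_NAME_LON
    if is_lat:
        return CF_VALUE_STD_NAME_GRID_LAT if is_rotated else CF_VALUE_STD_NAME_LAT
    return None


assert pick_coord_name(True, False, False) == "longitude"
assert pick_coord_name(False, True, True) == "grid_latitude"
assert pick_coord_name(False, False, False) is None
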
--- lib/iris/fileformats/_nc_load_rules/actions.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index 38b2b96d51..bfe7999789 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -280,9 +280,10 @@ def action_build_auxiliary_coordinate(engine, auxcoord_fact): rule_name = "fc_build_auxiliary_coordinate" # FOR NOW: attempt to identify type - # TODO: can maybe eventually remove this, as it only affects rule_name. + # TODO: eventually remove much of this, which only affects rule_name. # (but could possibly retain for future debugging purposes) coord_type = "" # unidentified : can be OK + coord_name = None if hh.is_time(engine, var_name): coord_type = "time" elif hh.is_time_period(engine, var_name): @@ -291,18 +292,22 @@ def action_build_auxiliary_coordinate(engine, auxcoord_fact): coord_type = "longitude" if hh.is_rotated_longitude(engine, var_name): coord_type += "_rotated" + coord_name = hh.CF_VALUE_STD_NAME_GRID_LON + else: + coord_name = hh.CF_VALUE_STD_NAME_LON elif hh.is_latitude(engine, var_name): coord_type = "latitude" if hh.is_rotated_latitude(engine, var_name): coord_type += "_rotated" + coord_name = hh.CF_VALUE_STD_NAME_GRID_LAT + else: + coord_name = hh.CF_VALUE_STD_NAME_LAT if coord_type: rule_name += f"_{coord_type}" cf_var = engine.cf_var.cf_group.auxiliary_coordinates[var_name] - hh.build_auxiliary_coordinate( - engine, cf_var, coord_name=hh.CF_VALUE_STD_NAME_GRID_LON - ) + hh.build_auxiliary_coordinate(engine, cf_var, coord_name=coord_name) return rule_name From 911bf936688f0241f156173a43dc1ddc6f2ebeb3 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Sun, 6 Jun 2021 23:19:56 +0100 Subject: [PATCH 18/35] Move grid-mapping-specific content out of common load_cube__activate code. --- .../load_cube/load_cube__activate/__init__.py | 287 +----------------- .../load_cube__activate/test__additional.py | 6 +- .../test__grid_mappings.py | 287 +++++++++++++++++- 3 files changed, 298 insertions(+), 282 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py index 6d6c1a92f4..a2f3d4ce31 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py @@ -23,7 +23,6 @@ import iris.fileformats.netcdf from iris.fileformats.netcdf import _load_cube import iris.fileformats._nc_load_rules.engine -import iris.fileformats._nc_load_rules.helpers as hh """ Notes on testing method. @@ -94,192 +93,7 @@ def tearDownClass(cls): # Destroy a temp directory for temp files. shutil.rmtree(cls.temp_dirpath) - def _make_testcase_cdl( - self, - latitude_units=None, - gridmapvar_name=None, - gridmapvar_mappropertyname=None, - mapping_missingradius=False, - mapping_type_name=None, - mapping_scalefactor=None, - yco_values=None, - xco_name=None, - yco_name=None, - xco_units=None, - yco_units=None, - ): - """ - Create a CDL string for a testcase. - - This is the "master" routine for creating all our testcases. - Kwarg options modify a simple default testcase with a latlon grid. - The routine handles the various testcase options and their possible - interactions. This includes knowing what extra changes are required - to support different grid-mapping types (for example). 
- - """ - # The grid-mapping options are standard-latlon, rotated, or non-latlon. - # This affects names+units of the X and Y coords. - # We don't have an option to *not* include a grid-mapping variable, but - # we can mimic a missing grid-mapping by changing the varname from that - # which the data-variable refers to, with "gridmapvar_name=xxx". - # Likewise, an invalid (unrecognised) grid-mapping can be mimicked by - # selecting an unkown 'grid_mapping_name' property, with - # "gridmapvar_mappropertyname=xxx". - if mapping_type_name is None: - # Default grid-mapping and coords are standard lat-lon. - mapping_type_name = hh.CF_GRID_MAPPING_LAT_LON - xco_name_default = hh.CF_VALUE_STD_NAME_LON - yco_name_default = hh.CF_VALUE_STD_NAME_LAT - xco_units_default = "degrees_east" - # Special kwarg overrides some of the values. - if latitude_units is None: - yco_units_default = "degrees_north" - else: - # Override the latitude units (to invalidate). - yco_units_default = latitude_units - - elif mapping_type_name == hh.CF_GRID_MAPPING_ROTATED_LAT_LON: - # Rotated lat-lon coordinates. - xco_name_default = hh.CF_VALUE_STD_NAME_GRID_LON - yco_name_default = hh.CF_VALUE_STD_NAME_GRID_LAT - xco_units_default = "degrees" - yco_units_default = "degrees" - - else: - # General non-latlon coordinates - # Exactly which depends on the grid_mapping name. - xco_name_default = hh.CF_VALUE_STD_NAME_PROJ_X - yco_name_default = hh.CF_VALUE_STD_NAME_PROJ_Y - xco_units_default = "m" - yco_units_default = "m" - - # Options can override coord (standard) names and units. - if xco_name is None: - xco_name = xco_name_default - if yco_name is None: - yco_name = yco_name_default - if xco_units is None: - xco_units = xco_units_default - if yco_units is None: - yco_units = yco_units_default - - grid_mapping_name = "grid" - # Options can override the gridvar name and properties. - g_varname = gridmapvar_name - g_mapname = gridmapvar_mappropertyname - if g_varname is None: - g_varname = grid_mapping_name - if g_mapname is None: - # If you change this, it is no longer a valid grid-mapping var. - g_mapname = "grid_mapping_name" - - # Omit the earth radius, if requested. - if mapping_missingradius: - g_radius_string = "" - else: - g_radius_string = f"{g_varname}:earth_radius = 6.e6 ;" - g_string = f""" - int {g_varname} ; - {g_varname}:{g_mapname} = "{mapping_type_name}"; - {g_radius_string} - """ - - # Add a specified scale-factor, if requested. - if mapping_scalefactor is not None: - # Add a specific scale-factor term to the grid mapping. - # (Non-unity scale is not supported for Mercator/Stereographic). - sfapo_name = hh.CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN - g_string += f""" - {g_varname}:{sfapo_name} = {mapping_scalefactor} ; - """ - - # - # Add various additional (minimal) required properties for different - # grid mapping types. 
- # - - # Those which require 'latitude of projection origin' - if mapping_type_name in ( - hh.CF_GRID_MAPPING_TRANSVERSE, - hh.CF_GRID_MAPPING_STEREO, - hh.CF_GRID_MAPPING_GEOSTATIONARY, - hh.CF_GRID_MAPPING_VERTICAL, - ): - latpo_name = hh.CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN - g_string += f""" - {g_varname}:{latpo_name} = 0.0 ; - """ - # Those which require 'longitude of projection origin' - if mapping_type_name in ( - hh.CF_GRID_MAPPING_STEREO, - hh.CF_GRID_MAPPING_GEOSTATIONARY, - hh.CF_GRID_MAPPING_VERTICAL, - ): - lonpo_name = hh.CF_ATTR_GRID_LON_OF_PROJ_ORIGIN - g_string += f""" - {g_varname}:{lonpo_name} = 0.0 ; - """ - # Those which require 'longitude of central meridian' - if mapping_type_name in (hh.CF_GRID_MAPPING_TRANSVERSE,): - latcm_name = hh.CF_ATTR_GRID_LON_OF_CENT_MERIDIAN - g_string += f""" - {g_varname}:{latcm_name} = 0.0 ; - """ - # Those which require 'perspective point height' - if mapping_type_name in ( - hh.CF_GRID_MAPPING_VERTICAL, - hh.CF_GRID_MAPPING_GEOSTATIONARY, - ): - pph_name = hh.CF_ATTR_GRID_PERSPECTIVE_HEIGHT - g_string += f""" - {g_varname}:{pph_name} = 600000.0 ; - """ - # Those which require 'sweep angle axis' - if mapping_type_name in (hh.CF_GRID_MAPPING_GEOSTATIONARY,): - saa_name = hh.CF_ATTR_GRID_SWEEP_ANGLE_AXIS - g_string += f""" - {g_varname}:{saa_name} = "y" ; - """ - - # y-coord values - if yco_values is None: - yco_values = [10.0, 20.0] - yco_value_strings = [str(val) for val in yco_values] - yco_values_string = ", ".join(yco_value_strings) - - # Construct the total CDL string - cdl_string = f""" - netcdf test {{ - dimensions: - yco = 2 ; - xco = 3 ; - variables: - double phenom(yco, xco) ; - phenom:standard_name = "air_temperature" ; - phenom:units = "K" ; - phenom:grid_mapping = "grid" ; - double yco(yco) ; - yco:axis = "Y" ; - yco:units = "{yco_units}" ; - yco:standard_name = "{yco_name}" ; - double xco(xco) ; - xco:axis = "X" ; - xco:units = "{xco_units}" ; - xco:standard_name = "{xco_name}" ; - {g_string} - data: - yco = {yco_values_string} ; - xco = 100., 110., 120. ; - }} - """ - if self.debug: - print("File content:") - print(cdl_string) - print("------\n") - return cdl_string - - def _load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): + def load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): """ Load the 'phenom' data variable in a CDL testcase, as a cube. @@ -326,100 +140,19 @@ def run_testcase(self, warning=None, **testcase_kwargs): else: context = self.assertWarnsRegexp(warning) with context: - cube = self._load_cube_from_cdl(cdl_string, cdl_path, nc_path) + cube = self.load_cube_from_cdl(cdl_string, cdl_path, nc_path) if self.debug: print("\nCube:") print(cube) print("") return cube - def check_result( - self, - cube, - cube_cstype=None, - cube_no_cs=False, - cube_no_xycoords=False, - xco_no_cs=False, # N.B. no effect if cube_no_cs is True - yco_no_cs=False, # N.B. no effect if cube_no_cs is True - yco_is_aux=False, - xco_stdname=True, - yco_stdname=True, - ): - """ - Check key properties of a result cube. + def _make_testcase_cdl(self, **kwargs): + """Make a testcase CDL string.""" + # Override for specific uses... + raise NotImplementedError() - Various options control the expected things which are tested. 
- """ - self.assertEqual(cube.standard_name, "air_temperature") - self.assertEqual(cube.var_name, "phenom") - - x_coords = cube.coords(dimensions=(1,)) - y_coords = cube.coords(dimensions=(0,)) - if yco_is_aux: - expected_dim_coords = x_coords - expected_aux_coords = y_coords - else: - expected_dim_coords = x_coords + y_coords - expected_aux_coords = [] - - self.assertEqual( - set(expected_dim_coords), set(cube.coords(dim_coords=True)) - ) - if cube_no_xycoords: - self.assertEqual(expected_dim_coords, []) - x_coord = None - y_coord = None - else: - self.assertEqual(len(x_coords), 1) - (x_coord,) = x_coords - self.assertEqual(len(y_coords), 1) - (y_coord,) = y_coords - - self.assertEqual( - set(expected_aux_coords), set(cube.coords(dim_coords=False)) - ) - - if x_coord: - if xco_stdname is None: - # no check - pass - elif xco_stdname is True: - self.assertIsNotNone(x_coord.standard_name) - elif xco_stdname is False: - self.assertIsNone(x_coord.standard_name) - else: - self.assertEqual(x_coord.standard_name, xco_stdname) - - if y_coord: - if yco_stdname is None: - # no check - pass - if yco_stdname is True: - self.assertIsNotNone(y_coord.standard_name) - elif yco_stdname is False: - self.assertIsNone(y_coord.standard_name) - else: - self.assertEqual(y_coord.standard_name, yco_stdname) - - cube_cs = cube.coord_system() - if cube_no_xycoords: - yco_cs = None - xco_cs = None - else: - yco_cs = y_coord.coord_system - xco_cs = x_coord.coord_system - if cube_no_cs: - self.assertIsNone(cube_cs) - self.assertIsNone(yco_cs) - self.assertIsNone(xco_cs) - else: - if cube_cstype is not None: - self.assertIsInstance(cube_cs, cube_cstype) - if xco_no_cs: - self.assertIsNone(xco_cs) - else: - self.assertEqual(xco_cs, cube_cs) - if yco_no_cs: - self.assertIsNone(yco_cs) - else: - self.assertEqual(yco_cs, cube_cs) + def check_result(self, cube, **kwargs): + """Test a result cube.""" + # Override for specific uses... 
+ raise NotImplementedError() diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__additional.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__additional.py index 0678e3b307..ce5b3ad7b7 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__additional.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__additional.py @@ -17,12 +17,12 @@ import iris.tests as tests -from iris.tests.unit.fileformats.netcdf.load_cube.load_cube__activate import ( - Mixin__nc_load_actions, +from iris.tests.unit.fileformats.netcdf.load_cube.load_cube__activate.test__grid_mappings import ( + Mixin__grid_mapping, ) -class Test__additional(Mixin__nc_load_actions, tests.IrisTest): +class Test__additional(Mixin__grid_mapping, tests.IrisTest): # Run grid-mapping tests with non-Pyke (actions) use_pyke = False diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py index ac495ac910..41b288195e 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py @@ -21,6 +21,285 @@ class Mixin__grid_mapping(Mixin__nc_load_actions): + # Testcase support routines for testing translation of grid-mappings + def _make_testcase_cdl( + self, + latitude_units=None, + gridmapvar_name=None, + gridmapvar_mappropertyname=None, + mapping_missingradius=False, + mapping_type_name=None, + mapping_scalefactor=None, + yco_values=None, + xco_name=None, + yco_name=None, + xco_units=None, + yco_units=None, + ): + """ + Create a CDL string for a testcase. + + This is the "master" routine for creating all our testcases. + Kwarg options modify a simple default testcase with a latlon grid. + The routine handles the various testcase options and their possible + interactions. This includes knowing what extra changes are required + to support different grid-mapping types (for example). + + """ + # The grid-mapping options are standard-latlon, rotated, or non-latlon. + # This affects names+units of the X and Y coords. + # We don't have an option to *not* include a grid-mapping variable, but + # we can mimic a missing grid-mapping by changing the varname from that + # which the data-variable refers to, with "gridmapvar_name=xxx". + # Likewise, an invalid (unrecognised) grid-mapping can be mimicked by + # selecting an unkown 'grid_mapping_name' property, with + # "gridmapvar_mappropertyname=xxx". + if mapping_type_name is None: + # Default grid-mapping and coords are standard lat-lon. + mapping_type_name = hh.CF_GRID_MAPPING_LAT_LON + xco_name_default = hh.CF_VALUE_STD_NAME_LON + yco_name_default = hh.CF_VALUE_STD_NAME_LAT + xco_units_default = "degrees_east" + # Special kwarg overrides some of the values. + if latitude_units is None: + yco_units_default = "degrees_north" + else: + # Override the latitude units (to invalidate). + yco_units_default = latitude_units + + elif mapping_type_name == hh.CF_GRID_MAPPING_ROTATED_LAT_LON: + # Rotated lat-lon coordinates. + xco_name_default = hh.CF_VALUE_STD_NAME_GRID_LON + yco_name_default = hh.CF_VALUE_STD_NAME_GRID_LAT + xco_units_default = "degrees" + yco_units_default = "degrees" + + else: + # General non-latlon coordinates + # Exactly which depends on the grid_mapping name. 
+ xco_name_default = hh.CF_VALUE_STD_NAME_PROJ_X + yco_name_default = hh.CF_VALUE_STD_NAME_PROJ_Y + xco_units_default = "m" + yco_units_default = "m" + + # Options can override coord (standard) names and units. + if xco_name is None: + xco_name = xco_name_default + if yco_name is None: + yco_name = yco_name_default + if xco_units is None: + xco_units = xco_units_default + if yco_units is None: + yco_units = yco_units_default + + grid_mapping_name = "grid" + # Options can override the gridvar name and properties. + g_varname = gridmapvar_name + g_mapname = gridmapvar_mappropertyname + if g_varname is None: + g_varname = grid_mapping_name + if g_mapname is None: + # If you change this, it is no longer a valid grid-mapping var. + g_mapname = "grid_mapping_name" + + # Omit the earth radius, if requested. + if mapping_missingradius: + g_radius_string = "" + else: + g_radius_string = f"{g_varname}:earth_radius = 6.e6 ;" + g_string = f""" + int {g_varname} ; + {g_varname}:{g_mapname} = "{mapping_type_name}"; + {g_radius_string} + """ + + # Add a specified scale-factor, if requested. + if mapping_scalefactor is not None: + # Add a specific scale-factor term to the grid mapping. + # (Non-unity scale is not supported for Mercator/Stereographic). + sfapo_name = hh.CF_ATTR_GRID_SCALE_FACTOR_AT_PROJ_ORIGIN + g_string += f""" + {g_varname}:{sfapo_name} = {mapping_scalefactor} ; + """ + + # + # Add various additional (minimal) required properties for different + # grid mapping types. + # + + # Those which require 'latitude of projection origin' + if mapping_type_name in ( + hh.CF_GRID_MAPPING_TRANSVERSE, + hh.CF_GRID_MAPPING_STEREO, + hh.CF_GRID_MAPPING_GEOSTATIONARY, + hh.CF_GRID_MAPPING_VERTICAL, + ): + latpo_name = hh.CF_ATTR_GRID_LAT_OF_PROJ_ORIGIN + g_string += f""" + {g_varname}:{latpo_name} = 0.0 ; + """ + # Those which require 'longitude of projection origin' + if mapping_type_name in ( + hh.CF_GRID_MAPPING_STEREO, + hh.CF_GRID_MAPPING_GEOSTATIONARY, + hh.CF_GRID_MAPPING_VERTICAL, + ): + lonpo_name = hh.CF_ATTR_GRID_LON_OF_PROJ_ORIGIN + g_string += f""" + {g_varname}:{lonpo_name} = 0.0 ; + """ + # Those which require 'longitude of central meridian' + if mapping_type_name in (hh.CF_GRID_MAPPING_TRANSVERSE,): + latcm_name = hh.CF_ATTR_GRID_LON_OF_CENT_MERIDIAN + g_string += f""" + {g_varname}:{latcm_name} = 0.0 ; + """ + # Those which require 'perspective point height' + if mapping_type_name in ( + hh.CF_GRID_MAPPING_VERTICAL, + hh.CF_GRID_MAPPING_GEOSTATIONARY, + ): + pph_name = hh.CF_ATTR_GRID_PERSPECTIVE_HEIGHT + g_string += f""" + {g_varname}:{pph_name} = 600000.0 ; + """ + # Those which require 'sweep angle axis' + if mapping_type_name in (hh.CF_GRID_MAPPING_GEOSTATIONARY,): + saa_name = hh.CF_ATTR_GRID_SWEEP_ANGLE_AXIS + g_string += f""" + {g_varname}:{saa_name} = "y" ; + """ + + # y-coord values + if yco_values is None: + yco_values = [10.0, 20.0] + yco_value_strings = [str(val) for val in yco_values] + yco_values_string = ", ".join(yco_value_strings) + + # Construct the total CDL string + cdl_string = f""" + netcdf test {{ + dimensions: + yco = 2 ; + xco = 3 ; + variables: + double phenom(yco, xco) ; + phenom:standard_name = "air_temperature" ; + phenom:units = "K" ; + phenom:grid_mapping = "grid" ; + double yco(yco) ; + yco:axis = "Y" ; + yco:units = "{yco_units}" ; + yco:standard_name = "{yco_name}" ; + double xco(xco) ; + xco:axis = "X" ; + xco:units = "{xco_units}" ; + xco:standard_name = "{xco_name}" ; + {g_string} + data: + yco = {yco_values_string} ; + xco = 100., 110., 120. 
; + }} + """ + if self.debug: + print("File content:") + print(cdl_string) + print("------\n") + return cdl_string + + def check_result( + self, + cube, + cube_cstype=None, + cube_no_cs=False, + cube_no_xycoords=False, + xco_no_cs=False, # N.B. no effect if cube_no_cs is True + yco_no_cs=False, # N.B. no effect if cube_no_cs is True + yco_is_aux=False, + xco_stdname=True, + yco_stdname=True, + ): + """ + Check key properties of a result cube. + + Various options control the expected things which are tested. + """ + self.assertEqual(cube.standard_name, "air_temperature") + self.assertEqual(cube.var_name, "phenom") + + x_coords = cube.coords(dimensions=(1,)) + y_coords = cube.coords(dimensions=(0,)) + if yco_is_aux: + expected_dim_coords = x_coords + expected_aux_coords = y_coords + else: + expected_dim_coords = x_coords + y_coords + expected_aux_coords = [] + + self.assertEqual( + set(expected_dim_coords), set(cube.coords(dim_coords=True)) + ) + if cube_no_xycoords: + self.assertEqual(expected_dim_coords, []) + x_coord = None + y_coord = None + else: + self.assertEqual(len(x_coords), 1) + (x_coord,) = x_coords + self.assertEqual(len(y_coords), 1) + (y_coord,) = y_coords + + self.assertEqual( + set(expected_aux_coords), set(cube.coords(dim_coords=False)) + ) + + if x_coord: + if xco_stdname is None: + # no check + pass + elif xco_stdname is True: + self.assertIsNotNone(x_coord.standard_name) + elif xco_stdname is False: + self.assertIsNone(x_coord.standard_name) + else: + self.assertEqual(x_coord.standard_name, xco_stdname) + + if y_coord: + if yco_stdname is None: + # no check + pass + if yco_stdname is True: + self.assertIsNotNone(y_coord.standard_name) + elif yco_stdname is False: + self.assertIsNone(y_coord.standard_name) + else: + self.assertEqual(y_coord.standard_name, yco_stdname) + + cube_cs = cube.coord_system() + if cube_no_xycoords: + yco_cs = None + xco_cs = None + else: + yco_cs = y_coord.coord_system + xco_cs = x_coord.coord_system + if cube_no_cs: + self.assertIsNone(cube_cs) + self.assertIsNone(yco_cs) + self.assertIsNone(xco_cs) + else: + if cube_cstype is not None: + self.assertIsInstance(cube_cs, cube_cstype) + if xco_no_cs: + self.assertIsNone(xco_cs) + else: + self.assertEqual(xco_cs, cube_cs) + if yco_no_cs: + self.assertIsNone(yco_cs) + else: + self.assertEqual(yco_cs, cube_cs) + + +class Mixin__grid_mapping__tests(Mixin__grid_mapping): # Various testcases for translation of grid-mappings def test_basic_latlon(self): @@ -438,7 +717,9 @@ def test_mapping__mismatch__nonll_coords_missing_system(self): ) -class Test__grid_mapping__pyke_rules(Mixin__grid_mapping, tests.IrisTest): +class Test__grid_mapping__pyke_rules( + Mixin__grid_mapping__tests, tests.IrisTest +): # Run grid-mapping tests with Pyke (rules) use_pyke = True @@ -455,7 +736,9 @@ def tearDownClass(cls): @skip -class Test__grid_mapping__nonpyke_actions(Mixin__grid_mapping, tests.IrisTest): +class Test__grid_mapping__nonpyke_actions( + Mixin__grid_mapping__tests, tests.IrisTest +): # Run grid-mapping tests with non-Pyke (actions) use_pyke = False From 81f22cd3205a56bc6517dcef77e027fa219dee80 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Sun, 6 Jun 2021 23:23:38 +0100 Subject: [PATCH 19/35] Add tests for time rules. 
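
The new tests construct small CDL testcases containing 'time' and/or
'time_period' coordinate variables, and check whether each one is loaded as
a dimension coordinate, an auxiliary coordinate, or not at all.

For orientation, the snippet below shows roughly the CDL which the new
_make_testcase_cdl routine produces for the simplest case, a single 'time'
dimension coordinate.  This is an illustrative sketch only (layout and
values are simplified), not output copied from the code in the diff below:

# Sketch only: approximate CDL for the default single-'time' testcase.
cdl = """
netcdf test {
dimensions:
    time = 2 ;
variables:
    double phenom(time) ;
        phenom:standard_name = "air_temperature" ;
        phenom:units = "K" ;
    double time(time) ;
        time:standard_name = "time" ;
        time:units = "hours since 2000-01-01" ;
data:
    time = 0.0, 1.0 ;
}
"""
print(cdl)
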
--- .../load_cube__activate/test__time_coords.py | 458 ++++++++++++++++++ 1 file changed, 458 insertions(+) create mode 100644 lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py new file mode 100644 index 0000000000..0102be1817 --- /dev/null +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py @@ -0,0 +1,458 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the engine.activate() call within the +`iris.fileformats.netcdf._load_cube` function. + +Tests for rules activation relating to 'time' and 'time_period' coords. + +""" +import iris.tests as tests + + +from iris.tests.unit.fileformats.netcdf.load_cube.load_cube__activate import ( + Mixin__nc_load_actions, +) + + +_OPTIONS_TEMPLATE = { + "which": "", # set to "something" + "stdname": "_auto_which", # default = time / time_period + "varname": "_as_which", # default = time / period + "dimname": "_as_which", + "in_phenomvar_dims": True, + "in_phenomvar_coords": False, # set for an aux-coord + "values_all_zero": False, # set to block CFDimensionVariable identity + "units": "_auto_which", # specific to time/period +} + + +class Opts: + # A dict-like thing initialised from the _OPTIONS settings. + # But using '.' access in place of indexing + def __init__(self, **kwargs): + opts = _OPTIONS_TEMPLATE.copy() + opts.update(kwargs) + self._opts = opts + + def __getattr__(self, item): + return self._opts[item] + + def __setattr__(self, key, value): + if key == "_opts": + # Avoid the infinite loop when setting up "self.opt=opts". + super().__setattr__(key, value) + else: + self._opts[key] = value + + def update(self, **kwargs): + self._opts.update(kwargs) + + +class Mixin__timecoords__common(Mixin__nc_load_actions): + def _make_testcase_cdl( + self, + phenom_dims="_auto", # =get from time+period opts + phenom_coords="_auto", # =get from time+period opts + time_opts=None, + period_opts=None, + timedim_name="time", + perioddim_name="period", + ): + opt_t = None + opt_p = None + if time_opts is not None: + # Replace 'True' with options dict for 'time' options + opt_t = Opts(which="time", **time_opts) + if period_opts is not None: + # Replace 'True' with options dict for 'period' options + opt_p = Opts(which="period", **period_opts) + + # Define the 'standard' dimensions which we will create + # NB we don't necessarily *use* either of these + dims_and_lens = {timedim_name: 2, perioddim_name: 3} + dims_string = "\n".join( + [ + f" {name} = {length} ;" + for name, length in dims_and_lens.items() + ] + ) + + phenom_auto_dims = [] + phenom_auto_coords = [] + coord_variables_string = "" + data_string = "" + for opt in (opt_t, opt_p): + # Handle computed defaults and common info for both coord options. + if opt: + if opt.which not in ("time", "period"): + raise ValueError(f"unrecognised opt.which={opt.which}") + + # Do computed defaults. 
+ if opt.stdname == "_auto_which": + if opt.which == "time": + opt.stdname = "time" + else: + assert opt.which == "period" + opt.stdname = "forecast_period" + if opt.varname == "_as_which": + opt.varname = opt.which + if opt.dimname == "_as_which": + opt.dimname = opt.which + if opt.units == "_auto_which": + if opt.which == "time": + opt.units = "hours since 2000-01-01" + else: + assert opt.which == "period" + opt.units = "hours" + + # Build 'auto' lists of phenom dims and (aux) coordinates. + if opt.in_phenomvar_dims: + phenom_auto_dims.append(opt.dimname) + if opt.in_phenomvar_coords: + phenom_auto_coords.append(opt.varname) + + # Add a definition of the coord variable. + coord_variables_string += f""" + double {opt.varname}({opt.dimname}) ; + {opt.varname}:standard_name = "{opt.stdname}" ; + {opt.varname}:units = "{opt.units}" ; +""" + # NOTE: we don't bother with an 'axis' property. + # We can probe the behaviour we need without that, because we + # are *not* testing the cf.py categorisation code, or the + # helper "build_xxx" routines. + + # Define coord-var data values (so it can be a dimension). + varname = opt.varname + if opt.values_all_zero: + # Use 'values_all_zero' to prevent a dim-var from + # identifying as a CFDimensionCoordinate (as it is + # non-monotonic). + dim_vals = [0.0] * dims_and_lens[opt.dimname] + else: + # "otherwise", assign an ascending sequence. + dim_vals = range(dims_and_lens[opt.dimname]) + dimvals_string = ", ".join(f"{val:0.1f}" for val in dim_vals) + data_string += f"\n {varname} = {dimvals_string} ;" + + if phenom_dims == "_auto": + phenom_dims = phenom_auto_dims + if not phenom_dims: + phenom_dims_string = "" + else: + phenom_dims_string = ", ".join(phenom_dims) + + if phenom_coords == "_auto": + phenom_coords = phenom_auto_coords + if not phenom_coords: + phenom_coords_string = "" + else: + phenom_coords_string = " ".join(phenom_coords) + phenom_coords_string = ( + " " + f'phenom:coordinates = "{phenom_coords_string}" ; ' + ) + + # Create a testcase with time dims + coords. + cdl_string = f""" +netcdf test {{ + dimensions: +{dims_string} + variables: + double phenom({phenom_dims_string}) ; + phenom:standard_name = "air_temperature" ; + phenom:units = "K" ; +{phenom_coords_string} + +{coord_variables_string} + data: +{data_string} +}} +""" + if self.debug: + print("Testcase CDL string") + print(cdl_string) + print("----") + print("") + return cdl_string + + def check_result( + self, + cube, + time_is="dim", + period_is="missing", + time_name=None, + period_name=None, + time_class="_auto", + period_class="_auto", + ): + """ + Check presence of expected dim/aux-coords in the result cube. + + Both of 'time_is' and 'period_is' can take values 'dim', 'aux' or + 'missing'. + + """ + options = ("dim", "aux", "missing") + msg = f'Invalid "{{name}}" = {{opt}} : Not one of {options!r}.' 
+ if time_is not in options: + raise ValueError(msg.format(name="time_is", opt=time_is)) + if period_is not in options: + raise ValueError(msg.format(name="period_is", opt=period_is)) + + # Get the facts we want to check + if time_name is None: + time_name = "time" + if period_name is None: + period_name = "forecast_period" + time_dimcos = cube.coords(time_name, dim_coords=True) + time_auxcos = cube.coords(time_name, dim_coords=False) + period_dimcos = cube.coords(period_name, dim_coords=True) + period_auxcos = cube.coords(period_name, dim_coords=False) + + if time_is == "dim": + self.assertEqual(len(time_dimcos), 1) + self.assertEqual(len(time_auxcos), 0) + elif time_is == "aux": + self.assertEqual(len(time_dimcos), 0) + self.assertEqual(len(time_auxcos), 1) + else: + self.assertEqual(len(time_dimcos), 0) + self.assertEqual(len(time_auxcos), 0) + + if period_is == "dim": + self.assertEqual(len(period_dimcos), 1) + self.assertEqual(len(period_auxcos), 0) + elif period_is == "aux": + self.assertEqual(len(period_dimcos), 0) + self.assertEqual(len(period_auxcos), 1) + else: + self.assertEqual(len(period_dimcos), 0) + self.assertEqual(len(period_auxcos), 0) + + +class Mixin__singlecoord__tests(Mixin__timecoords__common): + # Coordinate tests to be run for both 'time' and 'period' coordinate vars. + use_pyke = True + debug = False + # Set (in inheritors) to select time/period testing. + which = None + + def run_testcase(self, coord_dim_name=None, **opts): + """ + Specialise 'run_testcase' for single-coord 'time' or 'period' testing. + """ + which = self.which + assert which in ("time", "period") + + # Separate the 'Opt' keywords from "others" : others are passed + # directly to the parent routine, whereas 'Opt' ones are passed to + # 'time_opts' / 'period_opts' keys accordingly. + general_opts = {} + for key, value in list(opts.items()): + if key not in _OPTIONS_TEMPLATE.keys(): + del opts[key] + general_opts[key] = value + + if coord_dim_name is not None: + # Translate this into one of timedim_name/perioddim_name + general_opts[f"{which}dim_name"] = coord_dim_name + + period_opts = None + time_opts = None + if which == "time": + time_opts = opts + else: + period_opts = opts + + result = super().run_testcase( + time_opts=time_opts, period_opts=period_opts, **general_opts + ) + + return result + + def check_result(self, cube, coord_is="dim"): + """ + Specialise 'check_result' for single-coord 'time' or 'period' testing. + """ + # Pass generic 'coord_is' option to parent as time/period options. + which = self.which + assert which in ("time", "period") + + if which == "time": + time_is = coord_is + period_is = "missing" + else: + period_is = coord_is + time_is = "missing" + + super().check_result(cube, time_is=time_is, period_is=period_is) + + # + # Generic single-coordinate testcases. + # ( these are repeated for both 'time' and 'time_period' ) + # + + def test_dimension(self): + # Coord is a normal dimension --> dimcoord + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_coordinate_time + # 003 : fc_build_coordinate_time + result = self.run_testcase() + self.check_result(result, "dim") + + def test_dimension_in_phenom_coords(self): + # Dimension coord also present in phenom:coords. + # Strictly wrong but a common error in datafiles : must tolerate. 
+ # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_coordinate_time + # 003 : fc_build_coordinate_time + result = self.run_testcase(in_phenomvar_coords=True) + self.check_result(result, "dim") + + def test_dim_nonmonotonic(self): + # Coord has all-zero values, which prevents it being a dimcoord. + # The rule has a special way of treating it as an aux coord + # -- even though it doesn't appear in the phenom coords. + # ( Done by the build_coord routine, so not really a rules issue). + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_coordinate_time + # 003 : fc_build_coordinate_time + msg = "Failed to create.* dimension coordinate" + result = self.run_testcase(values_all_zero=True, warning=msg) + self.check_result(result, "aux") + + def test_dim_fails_typeident(self): + # The coord variable is identified as a CFDimensionCoordinate by cf.py, + # but having the wrong units causes it to fail the 'is_time' or + # 'is_period' test, so the 'provides_coord' rule fails to trigger. + # So it is built as a 'miscellaneous' dim-coord. + # N.B. this makes *no* practical difference, because a 'misc' dim + # coord is still a dim coord (albeit with bad units). + # ( N.B.#2 Not quite the same for lat/lon coords, where coord-specific + # 'build' rules always use a fixed standard-name ). + # Rules Triggered: + # 001 : fc_default + # 002 : fc_default_coordinate_(provide-phase) + # 003 : fc_build_coordinate_(miscellaneous) + result = self.run_testcase(units="1") + self.check_result(result, "dim") + + def test_aux(self): + # time/period is installed as an auxiliary coord. + # For this, rename both DIMENSIONS, so that the generated coords are + # not actually CF coordinates. + # For a valid case, we must *also* have a ref in phenom:coordinates + # Rules Triggered: + # 001 : fc_default + # 002 : fc_build_auxiliary_coordinate_time + result = self.run_testcase( + coord_dim_name="dim_renamed", + dimname="dim_renamed", + in_phenomvar_coords=True, + ) + self.check_result(result, "aux") + + def test_aux_not_in_phenom_coords(self): + # time/period is installed as an auxiliary coord, + # but we DIDN'T list it in phenom:coords -- otherwise as previous. + # Should have no result at all. + # Rules Triggered: + # 001 : fc_default + result = self.run_testcase( + coord_dim_name="dim_renamed", + dimname="dim_renamed", + in_phenomvar_coords=False, + ) # "should" be True for an aux-coord + self.check_result(result, "missing") + + def test_aux_fails_typeident(self): + # The coord variable is identified as a CFAuxiliaryCoordinate by cf.py, + # but having the wrong units causes it to fail the 'is_time' or + # 'is_period' test, so the 'provides_coord' rule fails to trigger. + # Rules Triggered: + # 001 : fc_default + # 002 : fc_build_auxiliary_coordinate + # Again, though it builds as a 'miscellaneous' rather than a recognised + # specific coord type, it makes no practical difference. + result = self.run_testcase( + coord_dim_name="dim_renamed", + dimname="dim_renamed", + in_phenomvar_coords=True, + units="1", + ) + self.check_result(result, "aux") + + def test_aux_no_coordsref(self): + # The coord variable is identified as a CFAuxiliaryCoordinate by cf.py, + # but having the wrong units causes it to fail the 'is_time' or + # 'is_period' test. + # Rules Triggered: + # 001 : fc_default + # 002 : fc_build_auxiliary_coordinate + # Again, though it builds as a 'miscellaneous' rather than a reocgnised + # specific coord type, it makes no practical difference. 
+ result = self.run_testcase( + coord_dim_name="dim_renamed", + dimname="dim_renamed", + in_phenomvar_coords=True, + units="1", + ) + self.check_result(result, "aux") + + +class Test__time(Mixin__singlecoord__tests, tests.IrisTest): + # Run 'time' coord tests + which = "time" + + @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + + +class Test__period(Mixin__singlecoord__tests, tests.IrisTest): + # Run 'time_period' coord tests + which = "time" + + @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + + +class Mixin__dualcoord__tests(Mixin__timecoords__common, tests.IrisTest): + # Coordinate test for combination of 'time' and 'time_period'. + # Not strictly necessary, as handling is independent, but a handy check + # on typical usage. + use_pyke = True + debug = False + + def test_time_and_period(self): + # Test case with both 'time' and 'period', with separate dims. + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_coordinate_time + # 003 : fc_provides_coordinate_time_period + # 004 : fc_build_coordinate_time + # 005 : fc_build_coordinate_time_period + result = self.run_testcase(time_opts={}, period_opts={}) + self.check_result(result, time_is="dim", period_is="dim") + + +if __name__ == "__main__": + tests.main() From 8cf0f3a0a024e24e256e92c734455633a72d7d72 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Mon, 7 Jun 2021 00:10:38 +0100 Subject: [PATCH 20/35] Simplify and remove unused keys; check coord classes; test dim+aux shared dim. --- .../load_cube__activate/test__time_coords.py | 93 ++++++++++++------- 1 file changed, 57 insertions(+), 36 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py index 0102be1817..aac63982b7 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py @@ -12,31 +12,18 @@ """ import iris.tests as tests +from iris.coords import AuxCoord, DimCoord + from iris.tests.unit.fileformats.netcdf.load_cube.load_cube__activate import ( Mixin__nc_load_actions, ) -_OPTIONS_TEMPLATE = { - "which": "", # set to "something" - "stdname": "_auto_which", # default = time / time_period - "varname": "_as_which", # default = time / period - "dimname": "_as_which", - "in_phenomvar_dims": True, - "in_phenomvar_coords": False, # set for an aux-coord - "values_all_zero": False, # set to block CFDimensionVariable identity - "units": "_auto_which", # specific to time/period -} - - class Opts: - # A dict-like thing initialised from the _OPTIONS settings. - # But using '.' access in place of indexing + # A dict-like thing which provides '.' access in place of indexing. def __init__(self, **kwargs): - opts = _OPTIONS_TEMPLATE.copy() - opts.update(kwargs) - self._opts = opts + self._opts = kwargs def __getattr__(self, item): return self._opts[item] @@ -52,6 +39,19 @@ def update(self, **kwargs): self._opts.update(kwargs) +# Per-coord options settings for testcase definitions. 
+_COORD_OPTIONS_TEMPLATE = { + "which": "", # set to "something" + "stdname": "_auto_which", # default = time / time_period + "varname": "_as_which", # default = time / period + "dimname": "_as_which", + "in_phenomvar_dims": True, + "in_phenomvar_coords": False, # set for an aux-coord + "values_all_zero": False, # set to block CFDimensionVariable identity + "units": "_auto_which", # specific to time/period +} + + class Mixin__timecoords__common(Mixin__nc_load_actions): def _make_testcase_cdl( self, @@ -65,11 +65,13 @@ def _make_testcase_cdl( opt_t = None opt_p = None if time_opts is not None: - # Replace 'True' with options dict for 'time' options - opt_t = Opts(which="time", **time_opts) + # Replace 'True' with an options dict for 'time' options + opt_t = Opts(**_COORD_OPTIONS_TEMPLATE) + opt_t.update(which="time", **time_opts) if period_opts is not None: - # Replace 'True' with options dict for 'period' options - opt_p = Opts(which="period", **period_opts) + # Replace 'True' with an options dict for 'period' options + opt_p = Opts(**_COORD_OPTIONS_TEMPLATE) + opt_p.update(which="period", **period_opts) # Define the 'standard' dimensions which we will create # NB we don't necessarily *use* either of these @@ -180,16 +182,7 @@ def _make_testcase_cdl( print("") return cdl_string - def check_result( - self, - cube, - time_is="dim", - period_is="missing", - time_name=None, - period_name=None, - time_class="_auto", - period_class="_auto", - ): + def check_result(self, cube, time_is="dim", period_is="missing"): """ Check presence of expected dim/aux-coords in the result cube. @@ -205,10 +198,8 @@ def check_result( raise ValueError(msg.format(name="period_is", opt=period_is)) # Get the facts we want to check - if time_name is None: - time_name = "time" - if period_name is None: - period_name = "forecast_period" + time_name = "time" + period_name = "forecast_period" time_dimcos = cube.coords(time_name, dim_coords=True) time_auxcos = cube.coords(time_name, dim_coords=False) period_dimcos = cube.coords(period_name, dim_coords=True) @@ -234,6 +225,17 @@ def check_result( self.assertEqual(len(period_dimcos), 0) self.assertEqual(len(period_auxcos), 0) + # Also check expected built Coord types. + if time_is == "dim": + self.assertIsInstance(time_dimcos[0], DimCoord) + elif time_is == "aux": + self.assertIsInstance(time_auxcos[0], AuxCoord) + + if period_is == "dim": + self.assertIsInstance(period_dimcos[0], DimCoord) + elif period_is == "aux": + self.assertIsInstance(period_auxcos[0], AuxCoord) + class Mixin__singlecoord__tests(Mixin__timecoords__common): # Coordinate tests to be run for both 'time' and 'period' coordinate vars. @@ -254,7 +256,7 @@ def run_testcase(self, coord_dim_name=None, **opts): # 'time_opts' / 'period_opts' keys accordingly. general_opts = {} for key, value in list(opts.items()): - if key not in _OPTIONS_TEMPLATE.keys(): + if key not in _COORD_OPTIONS_TEMPLATE.keys(): del opts[key] general_opts[key] = value @@ -453,6 +455,25 @@ def test_time_and_period(self): result = self.run_testcase(time_opts={}, period_opts={}) self.check_result(result, time_is="dim", period_is="dim") + def test_time_dim_period_aux(self): + # Test case with both 'time' and 'period' sharing a dim. 
+ # Rules Triggered: + # 001 : fc_default + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_coordinate_time + # 003 : fc_build_auxiliary_coordinate_time_period + # 004 : fc_build_coordinate_time + result = self.run_testcase( + time_opts={}, + period_opts=dict( + dimname="time", + in_phenomvar_dims=False, + in_phenomvar_coords=True, + ), + ) + self.check_result(result, time_is="dim", period_is="aux") + if __name__ == "__main__": tests.main() From dafa0fc9c861363493b802d5777a0a698df40f9d Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Mon, 7 Jun 2021 10:10:40 +0100 Subject: [PATCH 21/35] Simpler 'Opts' implementation. --- .../load_cube__activate/test__time_coords.py | 21 ++++++------------- 1 file changed, 6 insertions(+), 15 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py index aac63982b7..0303aa5a51 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py @@ -20,23 +20,14 @@ ) -class Opts: +class Opts(dict): # A dict-like thing which provides '.' access in place of indexing. def __init__(self, **kwargs): - self._opts = kwargs - - def __getattr__(self, item): - return self._opts[item] - - def __setattr__(self, key, value): - if key == "_opts": - # Avoid the infinite loop when setting up "self.opt=opts". - super().__setattr__(key, value) - else: - self._opts[key] = value - - def update(self, **kwargs): - self._opts.update(kwargs) + # Init like a dict + super().__init__(**kwargs) + # Alias contents "self['key']", as properties "self.key" + # See: https://stackoverflow.com/a/14620633/2615050 + self.__dict__ = self # Per-coord options settings for testcase definitions. From eec2cc970d7303250981aaf731c35929b1d1fbd8 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 8 Jun 2021 12:28:08 +0100 Subject: [PATCH 22/35] Tidy testing classes a bit. --- .../load_cube/load_cube__activate/__init__.py | 15 +++++++- .../test__grid_mappings.py | 5 +-- .../load_cube__activate/test__time_coords.py | 37 +++++++++++-------- 3 files changed, 36 insertions(+), 21 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py index a2f3d4ce31..234d34aad4 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py @@ -122,8 +122,12 @@ def load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): engine = iris.fileformats._nc_load_rules.engine.Engine() iris.fileformats.netcdf.DEBUG = self.debug - # iris.fileformats.netcdf.LOAD_PYKE = False - return _load_cube(engine, cf, cf_var, nc_path) + + # Call the main translation function-under-test. + cube = _load_cube(engine, cf, cf_var, nc_path) + + # Always returns a single cube. 
+ return cube def run_testcase(self, warning=None, **testcase_kwargs): """ @@ -134,13 +138,20 @@ def run_testcase(self, warning=None, **testcase_kwargs): """ cdl_path = str(self.temp_dirpath / "test.cdl") nc_path = cdl_path.replace(".cdl", ".nc") + cdl_string = self._make_testcase_cdl(**testcase_kwargs) + if self.debug: + print("CDL file content:") + print(cdl_string) + print("------\n") + if warning is None: context = self.assertNoWarningsRegexp() else: context = self.assertWarnsRegexp(warning) with context: cube = self.load_cube_from_cdl(cdl_string, cdl_path, nc_path) + if self.debug: print("\nCube:") print(cube) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py index 41b288195e..41fe6c4892 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py @@ -201,10 +201,6 @@ def _make_testcase_cdl( xco = 100., 110., 120. ; }} """ - if self.debug: - print("File content:") - print(cdl_string) - print("------\n") return cdl_string def check_result( @@ -722,6 +718,7 @@ class Test__grid_mapping__pyke_rules( ): # Run grid-mapping tests with Pyke (rules) use_pyke = True + debug = False @classmethod def setUpClass(cls): diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py index 0303aa5a51..a75d474d68 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py @@ -166,11 +166,6 @@ def _make_testcase_cdl( {data_string} }} """ - if self.debug: - print("Testcase CDL string") - print(cdl_string) - print("----") - print("") return cdl_string def check_result(self, cube, time_is="dim", period_is="missing"): @@ -230,8 +225,6 @@ def check_result(self, cube, time_is="dim", period_is="missing"): class Mixin__singlecoord__tests(Mixin__timecoords__common): # Coordinate tests to be run for both 'time' and 'period' coordinate vars. - use_pyke = True - debug = False # Set (in inheritors) to select time/period testing. which = None @@ -402,9 +395,11 @@ def test_aux_no_coordsref(self): self.check_result(result, "aux") -class Test__time(Mixin__singlecoord__tests, tests.IrisTest): +class Test__time__withpyke(Mixin__singlecoord__tests, tests.IrisTest): # Run 'time' coord tests which = "time" + use_pyke = True + debug = False @classmethod def setUpClass(cls): @@ -415,9 +410,11 @@ def tearDownClass(cls): super().tearDownClass() -class Test__period(Mixin__singlecoord__tests, tests.IrisTest): +class Test__period__withpyke(Mixin__singlecoord__tests, tests.IrisTest): # Run 'time_period' coord tests - which = "time" + which = "period" + use_pyke = True + debug = False @classmethod def setUpClass(cls): @@ -428,13 +425,10 @@ def tearDownClass(cls): super().tearDownClass() -class Mixin__dualcoord__tests(Mixin__timecoords__common, tests.IrisTest): - # Coordinate test for combination of 'time' and 'time_period'. +class Mixin__dualcoord__tests(Mixin__timecoords__common): + # Coordinate tests for a combination of 'time' and 'time_period'. # Not strictly necessary, as handling is independent, but a handy check # on typical usage. 
- use_pyke = True - debug = False - def test_time_and_period(self): # Test case with both 'time' and 'period', with separate dims. # Rules Triggered: @@ -466,5 +460,18 @@ def test_time_dim_period_aux(self): self.check_result(result, time_is="dim", period_is="aux") +class Test__dualcoord_tests__withpyke(Mixin__dualcoord__tests, tests.IrisTest): + use_pyke = True + debug = False + + @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + + if __name__ == "__main__": tests.main() From 01e6cf2ccbbbdeb6a0af41908a17fe43015ed1aa Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 8 Jun 2021 12:29:40 +0100 Subject: [PATCH 23/35] Tests for hybrid vertical coords. --- .../load_cube/load_cube__activate/__init__.py | 11 + .../test__hybrid_formulae.py | 225 ++++++++++++++++++ 2 files changed, 236 insertions(+) create mode 100644 lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__hybrid_formulae.py diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py index 234d34aad4..8bebb42439 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py @@ -126,6 +126,17 @@ def load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): # Call the main translation function-under-test. cube = _load_cube(engine, cf, cf_var, nc_path) + # Record on the cube, which hybrid coord elements were identified + # by the rules operation. + # Unlike the other translations, _load_cube does *not* convert this + # information into actual cube elements. That is instead done by + # `iris.fileformats.netcdf._load_aux_factory`. + # For rules testing, it is anyway more convenient to deal with the raw + # data, as each factory type has different validity requirements to + # build it, and none of that is relevant to the rules operation. + cube._formula_type_name = engine.requires.get("formula_type") + cube._formula_terms_byname = engine.requires.get("formula_terms") + # Always returns a single cube. return cube diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__hybrid_formulae.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__hybrid_formulae.py new file mode 100644 index 0000000000..c1a325925f --- /dev/null +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__hybrid_formulae.py @@ -0,0 +1,225 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the engine.activate() call within the +`iris.fileformats.netcdf._load_cube` function. + +Test rules activation relating to hybrid vertical coordinates. 
+ +""" +import iris.tests as tests + +import iris.fileformats._nc_load_rules.helpers as hh +from iris.tests.unit.fileformats.netcdf.load_cube.load_cube__activate import ( + Mixin__nc_load_actions, +) + + +class Mixin__formulae_tests(Mixin__nc_load_actions): + def _make_testcase_cdl(self, formula_root_name=None, term_names=None): + """Construct a testcase CDL for data with hybrid vertical coords.""" + if formula_root_name is None: + formula_root_name = "atmosphere_hybrid_height_coordinate" + if term_names is None: + term_names = hh.CF_COORD_VERTICAL.get(formula_root_name) + if term_names is None: + # unsupported type : just make something up + term_names = ["term1"] + + terms_string = "" + phenom_coord_names = ["vert"] # always include the root variable + formula_term_strings = [] + for term_name in term_names: + term_varname = "v_" + term_name + phenom_coord_names.append(term_varname) + formula_term_strings.append(f"{term_name}: {term_varname}") + terms_string += f""" + double {term_varname}(h) ; + {term_varname}:long_name = "{term_name}_long_name" ; + {term_varname}:units = "m" ; +""" + + # remove the extra initial space from the formula terms string + phenom_coords_string = " ".join(phenom_coord_names) + formula_terms_string = " ".join(formula_term_strings) + # Create the main result string. + cdl_str = f""" +netcdf test {{ +dimensions: + h = 2 ; +variables: + double phenom(h) ; + phenom:standard_name = "air_temperature" ; + phenom:units = "K" ; + phenom:coordinates = "{phenom_coords_string}" ; + double vert(h) ; + vert:standard_name = "{formula_root_name}" ; + vert:long_name = "hybrid_vertical" ; + vert:units = "m" ; + vert:formula_terms = "{formula_terms_string}" ; +{terms_string} +}} +""" + return cdl_str + + def check_result(self, cube, factory_type="_auto", formula_terms="_auto"): + """Check the result of a cube load with a hybrid vertical coord.""" + if factory_type == "_auto": + # replace with our 'default', which is hybrid-height. + # N.B. 'None' is different: it means expect *no* factory. + factory_type = "atmosphere_hybrid_height_coordinate" + self.assertEqual(cube._formula_type_name, factory_type) + + if formula_terms == "_auto": + # Set default terms-expected, according to the expected factory + # type. + if factory_type is None: + # If no factory, expect no identified terms. + formula_terms = [] + else: + # Expect the correct ones defined for the factory type. + formula_terms = hh.CF_COORD_VERTICAL[factory_type] + + # Compare the formula_terms list with the 'expected' ones. + # N.B. first make the 'expected' list lower case, as the lists in + # hh.CF_COORD_VERTICAL include uppercase, but rules outputs don't. + formula_terms = [term.lower() for term in formula_terms] + + # N.B. 
the terms dictionary can be missing, if there were none
+        actual_terms = cube._formula_terms_byname or {}
+        self.assertEqual(sorted(formula_terms), sorted(actual_terms.keys()))
+
+        # Check that there is an aux-coord of the expected name for each term
+        for var_name in actual_terms.values():
+            coords = cube.coords(var_name=var_name, dim_coords=False)
+            self.assertEqual(len(coords), 1)
+
+    #
+    # Actual testcase routines
+    #
+
+    def test_basic_hybridheight(self):
+        # Rules Triggered:
+        #     001 : fc_default
+        #     002 : fc_build_auxiliary_coordinate
+        #     003 : fc_build_auxiliary_coordinate
+        #     004 : fc_build_auxiliary_coordinate
+        #     005 : fc_build_auxiliary_coordinate
+        #     006 : fc_formula_type_atmosphere_hybrid_height_coordinate
+        #     007 : fc_formula_terms
+        #     008 : fc_formula_terms
+        #     009 : fc_formula_terms
+        result = self.run_testcase()
+        self.check_result(result)
+
+    def test_missing_term(self):
+        # Check behaviour when a term is missing.
+        # For the test, omit "orography", which is common in practice.
+        #
+        # Rules Triggered:
+        #     001 : fc_default
+        #     002 : fc_build_auxiliary_coordinate
+        #     003 : fc_build_auxiliary_coordinate
+        #     004 : fc_build_auxiliary_coordinate
+        #     005 : fc_formula_type_atmosphere_hybrid_height_coordinate
+        #     006 : fc_formula_terms
+        #     007 : fc_formula_terms
+        result = self.run_testcase(
+            term_names=["a", "b"]  # missing the 'orog' term
+        )
+        self.check_result(result, formula_terms=["a", "b"])
+
+    def test_no_terms(self):
+        # Check behaviour when *all* terms are missing.
+        # N.B. for any _actual_ type, this is probably invalid and would fail?
+        #
+        # Rules Triggered:
+        #     001 : fc_default
+        #     002 : fc_build_auxiliary_coordinate
+        result = self.run_testcase(
+            formula_root_name="atmosphere_hybrid_height_coordinate",
+            term_names=[],
+        )
+        # This does *not* trigger
+        # 'fc_formula_type_atmosphere_hybrid_height_coordinate'
+        # This is because, within the 'assert_case_specific_facts' routine,
+        # formula_roots are only recognised by scanning the identified
+        # formula_terms.
+        self.check_result(result, factory_type=None)
+
+    def test_unrecognised_verticaltype(self):
+        # Set the root variable name to something NOT a recognised hybrid type.
+        #
+        # Rules Triggered:
+        #     001 : fc_default
+        #     002 : fc_build_auxiliary_coordinate
+        #     003 : fc_build_auxiliary_coordinate
+        #     004 : fc_build_auxiliary_coordinate
+        #     005 : fc_formula_terms
+        #     006 : fc_formula_terms
+        result = self.run_testcase(
+            formula_root_name="unknown", term_names=["a", "b"]
+        )
+        # Check that it picks up the terms, but *not* the factory root coord,
+        # which is simply discarded.
+        self.check_result(result, factory_type=None, formula_terms=["a", "b"])
+
+
+# Add in test methods to exercise each (supported) vertical coordinate type
+# individually.
+# NOTE: hh.CF_COORD_VERTICAL lists all the valid types, but we don't yet
+# support all of them.
+_SUPPORTED_FORMULA_TYPES = ( + # NOTE: omit "atmosphere_hybrid_height_coordinate" : our basic testcase + "atmosphere_hybrid_sigma_pressure_coordinate", + "ocean_sigma_z_coordinate", + "ocean_sigma_coordinate", + "ocean_s_coordinate", + "ocean_s_coordinate_g1", + "ocean_s_coordinate_g2", +) +for hybrid_type in _SUPPORTED_FORMULA_TYPES: + + def construct_inner_func(hybrid_type): + term_names = hh.CF_COORD_VERTICAL[hybrid_type] + + def inner(self): + result = self.run_testcase( + formula_root_name=hybrid_type, term_names=term_names + ) + self.check_result( + result, factory_type=hybrid_type, formula_terms=term_names + ) + + return inner + + # Note: use an intermediate function to generate each test method, simply to + # generate a new local variable for 'hybrid_type' on each iteration. + # Otherwise all the test methods will refer to the *same* 'hybrid_type' + # variable, i.e. the loop variable, which does not work ! + method_name = f"test_{hybrid_type}_coord" + setattr( + Mixin__formulae_tests, method_name, construct_inner_func(hybrid_type) + ) + + +class Test__formulae__withpyke(Mixin__formulae_tests, tests.IrisTest): + use_pyke = True + debug = False + + @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + + +if __name__ == "__main__": + tests.main() From a4a93a7e5e62ab883047b0387bce0f9ff356bf50 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 8 Jun 2021 12:47:19 +0100 Subject: [PATCH 24/35] Small review changes. --- lib/iris/fileformats/_nc_load_rules/actions.py | 10 ++-------- lib/iris/fileformats/_nc_load_rules/helpers.py | 4 ++-- 2 files changed, 4 insertions(+), 10 deletions(-) diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index bfe7999789..c4f3990a85 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -48,8 +48,8 @@ from functools import wraps -def convert_actionname_to_rulename(func_name): - # Given the name of an action-func, return the name of the rule. +def _default_rulenamesfunc(func_name): + # A simple default function to deduce the rules-name from an action-name. funcname_prefix = "action_" rulename_prefix = "fc_" # To match existing behaviours rule_name = func_name @@ -60,12 +60,6 @@ def convert_actionname_to_rulename(func_name): return rule_name -def _default_rulenamesfunc(func_name): - # A simple default function to deduce the rules-name from an action-name. - rule_name = convert_actionname_to_rulename(func_name) - return rule_name - - def action_function(func): # Wrap an action function with some standard behaviour. # Notably : engages with the rules logging process. diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py index 0ac1cb7472..ce7a194b35 100644 --- a/lib/iris/fileformats/_nc_load_rules/helpers.py +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -4,8 +4,8 @@ # See COPYING and COPYING.LESSER in the root of the repository for full # licensing details. """ -All the pure-Python 'helper' functions which previously included in the Pyke -rules database. +All the pure-Python 'helper' functions which were previously included in the +Pyke rules database 'fc_rules_cf.krb'. Initially these haven't changed. The new rules approach is still calling most of them. 
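The per-type test-generation loop in the hybrid-formulae testcase above leans on a subtle point: Python closures capture variables, not values, so routing each generated method through an intermediate function is what gives every method its own 'hybrid_type'. A minimal standalone sketch of the pitfall and the fix (illustrative only, not part of the patches above; the helper names here are invented):

    def make_methods_broken(names):
        # The inner function captures the *variable* 'name', so after the
        # loop finishes every generated function sees the final value.
        methods = {}
        for name in names:

            def test(self):
                return name

            methods[f"test_{name}"] = test
        return methods


    def make_methods_fixed(names):
        # Routing the value through an intermediate function gives each
        # generated function its own local 'name' to capture.
        def build(name):
            def test(self):
                return name

            return test

        return {f"test_{name}": build(name) for name in names}


    names = ["ocean_sigma_coordinate", "ocean_s_coordinate"]
    broken = make_methods_broken(names)
    fixed = make_methods_fixed(names)
    # Every 'broken' method reports the last name in the loop ...
    assert broken["test_ocean_sigma_coordinate"](None) == "ocean_s_coordinate"
    # ... whereas each 'fixed' method keeps the name it was built with.
    assert fixed["test_ocean_sigma_coordinate"](None) == "ocean_sigma_coordinate"

A default argument (def test(self, name=name): ...) or functools.partial would achieve the same early binding; the intermediate-function form used in the patch keeps the generated test signature unchanged.
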
From f2deb130093308344dc647c114bc3c8890e3c535 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 8 Jun 2021 15:40:04 +0100 Subject: [PATCH 25/35] Regularise per-test records of rules triggered. --- .../load_cube__activate/test__additional.py | 13 +++++++------ .../load_cube__activate/test__time_coords.py | 5 ++--- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__additional.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__additional.py index ce5b3ad7b7..957c736501 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__additional.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__additional.py @@ -24,7 +24,8 @@ class Test__additional(Mixin__grid_mapping, tests.IrisTest): # Run grid-mapping tests with non-Pyke (actions) - use_pyke = False + use_pyke = True + debug = False @classmethod def setUpClass(cls): @@ -39,11 +40,11 @@ def test_nondim_lats(self): # # Rules Triggered: # 001 : fc_default - # 002 : fc_provides_grid_mapping_(latitude_longitude) - # 003 : fc_provides_coordinate_(latitude) - # 004 : fc_provides_coordinate_(longitude) - # 005 : fc_build_coordinate_(latitude) - # 006 : fc_build_coordinate_(longitude) + # 002 : fc_provides_grid_mapping_latitude_longitude + # 003 : fc_provides_coordinate_latitude + # 004 : fc_provides_coordinate_longitude + # 005 : fc_build_coordinate_latitude + # 006 : fc_build_coordinate_longitude # NOTES: # in terms of rule triggers, this is not distinct from a normal case # - but the latitude is now an aux-coord. diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py index a75d474d68..fa010f446d 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__time_coords.py @@ -326,9 +326,8 @@ def test_dim_fails_typeident(self): # ( N.B.#2 Not quite the same for lat/lon coords, where coord-specific # 'build' rules always use a fixed standard-name ). # Rules Triggered: - # 001 : fc_default - # 002 : fc_default_coordinate_(provide-phase) - # 003 : fc_build_coordinate_(miscellaneous) + # #001 : fc_default + # #002 : fc_default_coordinate result = self.run_testcase(units="1") self.check_result(result, "dim") From 5ff8528d2821e57e990767ca3c8ecb60cbe1830f Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 8 Jun 2021 16:43:31 +0100 Subject: [PATCH 26/35] Added tests for auxiliary lat+lon coords. --- .../test__grid_mappings.py | 132 ++++++++++++++++-- 1 file changed, 123 insertions(+), 9 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py index 41fe6c4892..4995cf5a34 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py @@ -35,6 +35,8 @@ def _make_testcase_cdl( yco_name=None, xco_units=None, yco_units=None, + xco_is_dim=True, + yco_is_dim=True, ): """ Create a CDL string for a testcase. 
@@ -92,6 +94,30 @@ def _make_testcase_cdl( if yco_units is None: yco_units = yco_units_default + phenom_auxcoord_names = [] + if xco_is_dim: + # xdim has same name as xco, making xco a dim-coord + xdim_name = "xco" + else: + # use alternate dim-name, and put xco on the 'coords' list + xdim_name = "xdim_altname" + phenom_auxcoord_names.append("xco") + if yco_is_dim: + # ydim has same name as yco, making yco a dim-coord + ydim_name = "yco" # This makes the X coord a dim-coord + else: + # use alternate dim-name, and put yco on the 'coords' list + ydim_name = "ydim_altname" # This makes the X coord a dim-coord + phenom_auxcoord_names.append("yco") + # Build a 'phenom:coords' string if needed. + if phenom_auxcoord_names: + phenom_coords_string = " ".join(phenom_auxcoord_names) + phenom_coords_string = f""" + phenom:coordinates = "{phenom_coords_string}" ; +""" + else: + phenom_coords_string = "" + grid_mapping_name = "grid" # Options can override the gridvar name and properties. g_varname = gridmapvar_name @@ -180,18 +206,19 @@ def _make_testcase_cdl( cdl_string = f""" netcdf test {{ dimensions: - yco = 2 ; - xco = 3 ; + {ydim_name} = 2 ; + {xdim_name} = 3 ; variables: - double phenom(yco, xco) ; + double phenom({ydim_name}, {xdim_name}) ; phenom:standard_name = "air_temperature" ; phenom:units = "K" ; phenom:grid_mapping = "grid" ; - double yco(yco) ; +{phenom_coords_string} + double yco({ydim_name}) ; yco:axis = "Y" ; yco:units = "{yco_units}" ; yco:standard_name = "{yco_name}" ; - double xco(xco) ; + double xco({xdim_name}) ; xco:axis = "X" ; xco:units = "{xco_units}" ; xco:standard_name = "{xco_name}" ; @@ -211,6 +238,7 @@ def check_result( cube_no_xycoords=False, xco_no_cs=False, # N.B. no effect if cube_no_cs is True yco_no_cs=False, # N.B. no effect if cube_no_cs is True + xco_is_aux=False, yco_is_aux=False, xco_stdname=True, yco_stdname=True, @@ -225,12 +253,16 @@ def check_result( x_coords = cube.coords(dimensions=(1,)) y_coords = cube.coords(dimensions=(0,)) + expected_dim_coords = [] + expected_aux_coords = [] if yco_is_aux: - expected_dim_coords = x_coords - expected_aux_coords = y_coords + expected_aux_coords += y_coords else: - expected_dim_coords = x_coords + y_coords - expected_aux_coords = [] + expected_dim_coords += y_coords + if xco_is_aux: + expected_aux_coords += x_coords + else: + expected_dim_coords += x_coords self.assertEqual( set(expected_dim_coords), set(cube.coords(dim_coords=True)) @@ -748,5 +780,87 @@ def tearDownClass(cls): super().tearDownClass() +class Mixin__aux_latlons(Mixin__grid_mapping): + # Testcases for translating auxiliary latitude+longitude variables + + def test_aux_lon(self): + # Change the name of xdim, and put xco on the coords list. + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_latitude_longitude + # 003 : fc_provides_coordinate_latitude + # 004 : fc_build_auxiliary_coordinate_longitude + # 005 : fc_build_coordinate_latitude + result = self.run_testcase(xco_is_dim=False) + self.check_result(result, xco_is_aux=True, xco_no_cs=True) + + def test_aux_lat(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_latitude_longitude + # 003 : fc_provides_coordinate_longitude + # 004 : fc_build_auxiliary_coordinate_latitude + # 005 : fc_build_coordinate_longitude + result = self.run_testcase(yco_is_dim=False) + self.check_result(result, yco_is_aux=True, yco_no_cs=True) + + def test_aux_lat_and_lon(self): + # When *both* are aux, the grid-mapping is discarded. 
+ # - as in this case there are then no dim-coords to reference it. + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_latitude_longitude + # 003 : fc_build_auxiliary_coordinate_latitude + # 004 : fc_build_auxiliary_coordinate_longitude + result = self.run_testcase(xco_is_dim=False, yco_is_dim=False) + self.check_result( + result, xco_is_aux=True, yco_is_aux=True, cube_no_cs=True + ) + + def test_aux_lon_rotated(self): + # Same but with rotated-style lat + lon coords. + # + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_rotated_latitude_longitude + # 003 : fc_provides_coordinate_latitude + # 004 : fc_build_auxiliary_coordinate_longitude_rotated + # 005 : fc_build_coordinate_latitude_rotated + result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON, + xco_is_dim=False, + ) + self.check_result(result, xco_is_aux=True, xco_no_cs=True) + + def test_aux_lat_rotated(self): + # Rules Triggered: + # 001 : fc_default + # 002 : fc_provides_grid_mapping_rotated_latitude_longitude + # 003 : fc_provides_coordinate_latitude + # 004 : fc_build_auxiliary_coordinate_longitude_rotated + # 005 : fc_build_coordinate_latitude_rotated + result = self.run_testcase( + mapping_type_name=hh.CF_GRID_MAPPING_ROTATED_LAT_LON, + xco_is_dim=False, + ) + self.check_result(result, xco_is_aux=True, xco_no_cs=True) + + +class Test__aux_latlons__pyke_rules(Mixin__aux_latlons, tests.IrisTest): + # Run aux-latlons tests with Pyke (rules) + use_pyke = True + debug = False + + @classmethod + def setUpClass(cls): + super().setUpClass() + + @classmethod + def tearDownClass(cls): + super().tearDownClass() + + if __name__ == "__main__": tests.main() From 9d9c35066f7295c832ca2d9bc8357ebdcbc91cca Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Tue, 8 Jun 2021 21:27:45 +0100 Subject: [PATCH 27/35] Tests for remaining miscellaneous rules. --- .../test__miscellaneous.py | 216 ++++++++++++++++++ 1 file changed, 216 insertions(+) create mode 100644 lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__miscellaneous.py diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__miscellaneous.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__miscellaneous.py new file mode 100644 index 0000000000..d41b19e108 --- /dev/null +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__miscellaneous.py @@ -0,0 +1,216 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the LGPL license. +# See COPYING and COPYING.LESSER in the root of the repository for full +# licensing details. +""" +Unit tests for the engine.activate() call within the +`iris.fileformats.netcdf._load_cube` function. + +Tests for rules activation relating to some isolated aspects : + * UKMO um-specific metadata + * label coordinates + * cell measures + * ancillary variables + +""" +import iris.tests as tests + +from iris.coords import AuxCoord, CellMeasure, AncillaryVariable +from iris.fileformats.pp import STASH + +from iris.tests.unit.fileformats.netcdf.load_cube.load_cube__activate import ( + Mixin__nc_load_actions, +) + + +class Mixin__ukmo_attributes(Mixin__nc_load_actions): + # Tests for handling of the special UM-specific data-var attributes. 
+ def _make_testcase_cdl(self, **add_attrs): + phenom_attrs_string = "" + for key, value in add_attrs.items(): + phenom_attrs_string += f""" + phenom:{key} = "{value}" ; +""" + + cdl_string = f""" +netcdf test {{ + dimensions: + xdim = 2 ; + variables: + double phenom(xdim) ; + phenom:standard_name = "air_temperature" ; + phenom:units = "K" ; +{phenom_attrs_string} +}} +""" + return cdl_string + + def check_result(self, cube, stashcode=None, processflags=None): + cube_stashattr = cube.attributes.get("STASH") + cube_processflags = cube.attributes.get("ukmo__process_flags") + + if stashcode is not None: + self.assertIsInstance(cube_stashattr, STASH) + self.assertEqual(str(stashcode), str(cube_stashattr)) + else: + self.assertIsNone(cube_stashattr) + + if processflags is not None: + self.assertIsInstance(cube_processflags, tuple) + self.assertEqual(set(cube_processflags), set(processflags)) + else: + self.assertIsNone(cube_processflags) + + # + # Testcase routines + # + stashcode = "m01s02i034" # Just one valid STASH msi string for testing + + def test_stash(self): + cube = self.run_testcase(um_stash_source=self.stashcode) + self.check_result(cube, stashcode=self.stashcode) + + def test_stash_altname(self): + cube = self.run_testcase(ukmo__um_stash_source=self.stashcode) + self.check_result(cube, stashcode=self.stashcode) + + def test_stash_empty(self): + msg = "Expected STASH code MSI string" + with self.assertRaisesRegex(ValueError, msg): + self.run_testcase(ukmo__um_stash_source="") + + def test_stash_invalid(self): + msg = "Expected STASH code MSI string" + with self.assertRaisesRegex(ValueError, msg): + self.run_testcase(ukmo__um_stash_source="XXX") + + def test_processflags_single(self): + cube = self.run_testcase(ukmo__process_flags="this") + self.check_result(cube, processflags=["this"]) + + def test_processflags_multi_with_underscores(self): + flags_testinput = "this that_1 the_other_one x" + flags_expectresult = ["this", "that 1", "the other one", "x"] + cube = self.run_testcase(ukmo__process_flags=flags_testinput) + self.check_result(cube, processflags=flags_expectresult) + + def test_processflags_empty(self): + cube = self.run_testcase(ukmo__process_flags="") + expected_result = [""] # May seem odd, but that's what it does. + self.check_result(cube, processflags=expected_result) + + +class Test__ukmo_attributes__withpyke(Mixin__ukmo_attributes, tests.IrisTest): + use_pyke = True + + +class Mixin__labels_cellmeasures_ancils(Mixin__nc_load_actions): + # Tests for some simple rules that translate facts directly into cube data, + # with no alternative actions, complications or failure modes to test. + def _make_testcase_cdl( + self, + include_label=False, + include_cellmeasure=False, + include_ancil=False, + ): + + phenom_extra_attrs_string = "" + extra_vars_string = "" + + if include_label: + phenom_extra_attrs_string += """ + phenom:coordinates = "v_label" ; +""" + extra_vars_string += """ + char v_label(xdim, strdim) ; + v_label:long_name = "string data" ; +""" + + if include_cellmeasure: + # One simple case : a valid link + a variable definition. + phenom_extra_attrs_string += """ + phenom:cell_measures = "area: v_cellm" ; +""" + extra_vars_string += """ + double v_cellm(xdim) ; + v_cellm:long_name = "cell areas" ; +""" + + if include_ancil: + # One simple case : a valid link + a variable definition. 
+ phenom_extra_attrs_string += """ + phenom:ancillary_variables = "v_ancil" ; +""" + extra_vars_string += """ + double v_ancil(xdim) ; + v_ancil:long_name = "ancillary values" ; +""" + cdl_string = f""" + netcdf test {{ + dimensions: + xdim = 2 ; + strdim = 5 ; + variables: + double phenom(xdim) ; + phenom:standard_name = "air_temperature" ; + phenom:units = "K" ; +{phenom_extra_attrs_string} +{extra_vars_string} + }} + """ + return cdl_string + + def check_result( + self, + cube, + expect_label=False, + expect_cellmeasure=False, + expect_ancil=False, + ): + label_coords = cube.coords(var_name="v_label") + if expect_label: + self.assertEqual(len(label_coords), 1) + (coord,) = label_coords + self.assertIsInstance(coord, AuxCoord) + self.assertEqual(coord.dtype.kind, "U") + else: + self.assertEqual(len(label_coords), 0) + + cell_measures = cube.cell_measures() + if expect_cellmeasure: + self.assertEqual(len(cell_measures), 1) + (cellm,) = cell_measures + self.assertIsInstance(cellm, CellMeasure) + else: + self.assertEqual(len(cell_measures), 0) + + ancils = cube.ancillary_variables() + if expect_ancil: + self.assertEqual(len(ancils), 1) + (ancil,) = ancils + self.assertIsInstance(ancil, AncillaryVariable) + else: + self.assertEqual(len(ancils), 0) + + def test_label(self): + cube = self.run_testcase(include_label=True) + self.check_result(cube, expect_label=True) + + def test_ancil(self): + cube = self.run_testcase(include_ancil=True) + self.check_result(cube, expect_ancil=True) + + def test_cellmeasure(self): + cube = self.run_testcase(include_cellmeasure=True) + self.check_result(cube, expect_cellmeasure=True) + + +class Test__labels_cellmeasures_ancils__withpyke( + Mixin__labels_cellmeasures_ancils, tests.IrisTest +): + use_pyke = True + + +if __name__ == "__main__": + tests.main() From 85752cacf088b3130e5ece45ca72f0042b560a1d Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 9 Jun 2021 10:02:02 +0100 Subject: [PATCH 28/35] Review: fix typos. --- .../load_cube/load_cube__activate/test__grid_mappings.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py index 4995cf5a34..5a11a2cc88 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/test__grid_mappings.py @@ -100,15 +100,18 @@ def _make_testcase_cdl( xdim_name = "xco" else: # use alternate dim-name, and put xco on the 'coords' list + # This makes the X coord an aux-coord xdim_name = "xdim_altname" phenom_auxcoord_names.append("xco") if yco_is_dim: # ydim has same name as yco, making yco a dim-coord - ydim_name = "yco" # This makes the X coord a dim-coord + ydim_name = "yco" # This makes the Y coord a dim-coord else: # use alternate dim-name, and put yco on the 'coords' list - ydim_name = "ydim_altname" # This makes the X coord a dim-coord + # This makes the Y coord an aux-coord + ydim_name = "ydim_altname" phenom_auxcoord_names.append("yco") + # Build a 'phenom:coords' string if needed. if phenom_auxcoord_names: phenom_coords_string = " ".join(phenom_auxcoord_names) From e4c6c8fbd3913859191d4a6b09b03e4e55b02b9a Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 9 Jun 2021 17:39:43 +0100 Subject: [PATCH 29/35] Add testing option to compare pyke and nonpyke loads. 
--- .../load_cube/load_cube__activate/__init__.py | 100 ++++++++++++++---- 1 file changed, 82 insertions(+), 18 deletions(-) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py index 8bebb42439..6946c2819e 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py @@ -19,6 +19,8 @@ import subprocess import tempfile +import numpy as np + from iris.fileformats.cf import CFReader import iris.fileformats.netcdf from iris.fileformats.netcdf import _load_cube @@ -82,6 +84,9 @@ class Mixin__nc_load_actions: # TODO: ?possibly? remove when development is complete debug = False + # whether to perform action in both ways and compare results. + compare_pyke_nonpyke = True + @classmethod def setUpClass(cls): # # Control which testing method we are applying. @@ -116,29 +121,88 @@ def load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): cf_var = list(cf.cf_group.data_variables.values())[0] cf_var = cf.cf_group.data_variables["phenom"] - if self.use_pyke: - engine = iris.fileformats.netcdf._pyke_kb_engine_real() - else: - engine = iris.fileformats._nc_load_rules.engine.Engine() + do_pyke = self.use_pyke or self.compare_pyke_nonpyke + do_nonpyke = not self.use_pyke or self.compare_pyke_nonpyke + if do_pyke: + pyke_engine = iris.fileformats.netcdf._pyke_kb_engine_real() + if do_nonpyke: + nonpyke_engine = iris.fileformats._nc_load_rules.engine.Engine() iris.fileformats.netcdf.DEBUG = self.debug - # Call the main translation function-under-test. - cube = _load_cube(engine, cf, cf_var, nc_path) - - # Record on the cube, which hybrid coord elements were identified - # by the rules operation. - # Unlike the other translations, _load_cube does *not* convert this - # information into actual cube elements. That is instead done by - # `iris.fileformats.netcdf._load_aux_factory`. - # For rules testing, it is anyway more convenient to deal with the raw - # data, as each factory type has different validity requirements to - # build it, and none of that is relevant to the rules operation. - cube._formula_type_name = engine.requires.get("formula_type") - cube._formula_terms_byname = engine.requires.get("formula_terms") + # Call the main translation function to load a single cube. + def load_single_cube(engine): + # _load_cube establishes per-cube facts, activates rules and + # produces an actual cube. + cube = _load_cube(engine, cf, cf_var, nc_path) + + # Also Record, on the cubes, which hybrid coord elements were identified + # by the rules operation. + # Unlike the other translations, _load_cube does *not* convert this + # information into actual cube elements. That is instead done by + # `iris.fileformats.netcdf._load_aux_factory`. + # For rules testing, it is anyway more convenient to deal with the raw + # data, as each factory type has different validity requirements to + # build it, and none of that is relevant to the rules operation. + cube._formula_type_name = engine.requires.get("formula_type") + cube._formula_terms_byname = engine.requires.get("formula_terms") + + return cube + + if do_pyke: + pyke_cube = load_single_cube(pyke_engine) + if do_nonpyke: + nonpyke_cube = load_single_cube(nonpyke_engine) + + # If requested, directly compare the pyke and non-pyke outputs. + if self.compare_pyke_nonpyke: + # Compare the loaded cubes from both engines. 
+ print("\nPYKE-NONPYKE COMPARE") + + # First zap cube-data, as masked data does not compare well. + def unmask_cube(cube): + # preserve the original, we're going to realise.. + cube = cube.copy() + if isinstance(cube.data, np.ma.MaskedArray): + cube.data = cube.data.filled(0) + return cube + + pyke_cube_copy = unmask_cube(pyke_cube) + nonpyke_cube_copy = unmask_cube(nonpyke_cube) + if self.debug: + if nonpyke_cube_copy != pyke_cube_copy: + + def show_cube(cube): + result = str(cube) + result += "\n--coords--" + for coord in cube.coords(): + result += "\n " + str(coord) + result += "\n--attributes--" + if not cube.attributes: + result += "\n (none)" + else: + for key, value in cube.attributes.items(): + result += f"\n {key}: {value}" + return result + + print("\nPyke/nonpyke mismatch.") + print("Pyke cube:\n----") + print(show_cube(pyke_cube)) + print() + print("NONPyke cube:\n----") + print(show_cube(nonpyke_cube)) + print("") + else: + self.assertEqual(pyke_cube_copy, nonpyke_cube_copy) + + # Return the right thing, whether we did 'both' or not + if self.use_pyke: + result_cube = pyke_cube + else: + result_cube = nonpyke_cube # Always returns a single cube. - return cube + return result_cube def run_testcase(self, warning=None, **testcase_kwargs): """ From 62f2baef086ccaf06d7de0732b8d143865b3e805 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 9 Jun 2021 17:44:59 +0100 Subject: [PATCH 30/35] Fixes to grid-mapping/dimcoord actions: passing all compare-tests. --- .../fileformats/_nc_load_rules/actions.py | 113 +++++++++++++++--- 1 file changed, 94 insertions(+), 19 deletions(-) diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index c4f3990a85..07bd407940 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -182,17 +182,18 @@ def action_provides_grid_mapping(engine, gridmapping_fact): def action_provides_coordinate(engine, dimcoord_fact): (var_name,) = dimcoord_fact - # Identify the coord type - # N.B. *only* to "name" the rule, for debug : no functional need. + # Identify the "type" of a coordinate variable coord_type = None - if hh.is_latitude(engine, var_name): - coord_type = "latitude" - elif hh.is_longitude(engine, var_name): - coord_type = "longitude" - elif hh.is_rotated_latitude(engine, var_name): + # NOTE: must test for rotated cases *first*, as 'is_longitude' and + # 'is_latitude' functions also accept rotated cases. + if hh.is_rotated_latitude(engine, var_name): coord_type = "rotated_latitude" elif hh.is_rotated_longitude(engine, var_name): coord_type = "rotated_longitude" + elif hh.is_latitude(engine, var_name): + coord_type = "latitude" + elif hh.is_longitude(engine, var_name): + coord_type = "longitude" elif hh.is_time(engine, var_name): coord_type = "time" elif hh.is_time_period(engine, var_name): @@ -207,7 +208,7 @@ def action_provides_coordinate(engine, dimcoord_fact): # N.B. in the original rules, this does *not* trigger separate # 'provides' and 'build' phases : there is just a single # 'fc_default_coordinate' rule. - # Rationalise this for now by making it like the others. + # Rationalise this for now by making it more like the others. # FOR NOW: ~matching old code, but they could *all* be simplified. # TODO: combine 2 operation into 1 for ALL of these. 
coord_type = "miscellaneous" @@ -228,14 +229,14 @@ def action_provides_coordinate(engine, dimcoord_fact): # (@1) an (optional) fixed standard-name for the coordinate, or None # If None, the coordinate name is copied from the source variable _coordtype_to_gridtype_coordname = { - "latitude": ("latitude_longitude", hh.CF_VALUE_STD_NAME_LAT), - "longitude": ("latitude_longitude", hh.CF_VALUE_STD_NAME_LON), + "latitude": ("latlon", hh.CF_VALUE_STD_NAME_LAT), + "longitude": ("latlon", hh.CF_VALUE_STD_NAME_LON), "rotated_latitude": ( - "rotated_latitude_longitude", + "rotated", hh.CF_VALUE_STD_NAME_GRID_LAT, ), "rotated_longitude": ( - "rotated_latitude_longitude", + "rotated", hh.CF_VALUE_STD_NAME_GRID_LON, ), "projection_x": ("projected", hh.CF_VALUE_STD_NAME_PROJ_X), @@ -251,20 +252,94 @@ def action_build_dimension_coordinate(engine, providescoord_fact): coord_type, var_name = providescoord_fact cf_var = engine.cf_var.cf_group[var_name] rule_name = f"fc_build_coordinate_({coord_type})" - grid_type, coord_name = _coordtype_to_gridtype_coordname[coord_type] - coord_system = None - if grid_type is not None: - # If a type is identified with a grid, use the coordinate system + coord_grid_class, coord_name = _coordtype_to_gridtype_coordname[coord_type] + if coord_grid_class is None: + # Coordinates not identified with a specific grid-type class (latlon, + # rotated or projected) are always built, but can have no coord-system. + coord_system = None # no coord-system can be used + succeed = True + else: + grid_classes = ("latlon", "rotated", "projected") + assert coord_grid_class in grid_classes + # If a coord is of a type identified with a grid, we may have a + # coordinate system (i.e. a valid grid-mapping). # N.B. this requires each grid-type identification to validate the # coord var (e.g. "is_longitude"). # Non-conforming lon/lat/projection coords will be classed as # dim-coords by cf.py, but 'action_provides_coordinate' will give them # a coord-type of 'miscellaneous' : hence, they have no coord-system. coord_system = engine.cube_parts.get("coordinate_system") - hh.build_dimension_coordinate( - engine, cf_var, coord_name=coord_name, coord_system=coord_system - ) + # Translate the specific grid-mapping type to a grid-class + if coord_system is None: + succeed = True + cs_gridclass = None + else: + gridtypes_factlist = engine.fact_list("grid-type") + (gridtypes_fact,) = gridtypes_factlist # only 1 fact + (cs_gridtype,) = gridtypes_fact # fact contains 1 term + # (i.e. one of latlon/rotated/prjected, like coord_grid_class) + if cs_gridtype == "latitude_longitude": + cs_gridclass = "latlon" + elif cs_gridtype == "rotated_latitude_longitude": + cs_gridclass = "rotated" + else: + # Other specific projections + assert cs_gridtype is not None + cs_gridclass = "projected" + + assert cs_gridclass in grid_classes + (None,) + + if coord_grid_class == "latlon": + if cs_gridclass == "latlon": + succeed = True + elif cs_gridclass is None: + succeed = True + rule_name += "(no-cs)" + elif cs_gridclass == "rotated": + # We disallow this case + succeed = False + else: + assert cs_gridclass == "projected" + # succeed, no error, but discards the coord-system + # TODO: could issue a warning in this case ? + succeed = True + coord_system = None + rule_name += "(no-cs : discarded projected cs)" + elif coord_grid_class == "rotated": + # For rotated, we also accept no coord-system, but do *not* accept + # the presence of an unsuitable type. 
+ if cs_gridclass == "rotated": + succeed = True + rule_name += "(rotated)" + elif cs_gridclass is None: + succeed = True + rule_name += "(rotated no-cs)" + elif cs_gridclass == "latlon": + # We allow this, but discard the CS + succeed = False + rule_name += "(FAILED rotated with latlon-cs)" + else: + assert cs_gridclass == "projected" + succeed = True + coord_system = None + rule_name += "(rotated : discarded projected cs)" + elif coord_grid_class == "projected": + # In this case, can *only* build a coord at all if there is a + # coord-system of the correct class (i.e. 'projected'). + succeed = cs_gridclass == "projected" + if not succeed: + rule_name += "(FAILED projected coord with non-projected cs)" + else: + msg = ( + f'Unexpected coord grid-class "{coord_grid_class}" ' + f"for coord {var_name}." + ) + raise ValueError(msg) + if succeed: + hh.build_dimension_coordinate( + engine, cf_var, coord_name=coord_name, coord_system=coord_system + ) return rule_name From a364bf985d51f2d758f50b85e47ca17e09c1f803 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 9 Jun 2021 18:52:50 +0100 Subject: [PATCH 31/35] Actions for remaining 'miscellaneous' behaviour: All tests passing, including pyke == nonpyke checks. --- .../fileformats/_nc_load_rules/actions.py | 88 ++++++++++++++++++- .../load_cube/load_cube__activate/__init__.py | 2 +- 2 files changed, 88 insertions(+), 2 deletions(-) diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index 07bd407940..97141c4e82 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -45,6 +45,7 @@ """ from . import helpers as hh +import iris.fileformats.pp as pp from functools import wraps @@ -381,7 +382,72 @@ def action_build_auxiliary_coordinate(engine, auxcoord_fact): return rule_name +@action_function +def action_ukmo_stash(engine): + rule_name = "fc_attribute_ukmo__um_stash_source" + var = engine.cf_var + attr_name = "ukmo__um_stash_source" + attr_value = getattr(var, attr_name, None) + if attr_value is None: + attr_altname = "um_stash_source" # legacy form + attr_value = getattr(var, attr_altname, None) + if attr_value is None: + rule_name += "(NOT-TRIGGERED)" + else: + # No helper routine : just do it + engine.cube.attributes["STASH"] = pp.STASH.from_msi(attr_value) + + return rule_name + + +@action_function +def action_ukmo_processflags(engine): + rule_name = "fc_attribute_ukmo__process_flags" + var = engine.cf_var + attr_name = "ukmo__process_flags" + attr_value = getattr(var, attr_name, None) + if attr_value is None: + rule_name += "(NOT-TRIGGERED)" + else: + # No helper routine : just do it + flags = [x.replace("_", " ") for x in attr_value.split(" ")] + engine.cube.attributes["ukmo__process_flags"] = tuple(flags) + + return rule_name + + +@action_function +def action_build_cell_measure(engine, cellm_fact): + (var_name,) = cellm_fact + var = engine.cf_var.cf_group.cell_measures[var_name] + hh.build_cell_measures(engine, var) + + +@action_function +def action_build_ancil_var(engine, ancil_fact): + (var_name,) = ancil_fact + var = engine.cf_var.cf_group.ancillary_variables[var_name] + hh.build_ancil_var(engine, var) + + +@action_function +def action_build_label_coordinate(engine, label_fact): + (var_name,) = label_fact + var = engine.cf_var.cf_group.labels[var_name] + hh.build_auxiliary_coordinate(engine, var) + + def run_actions(engine): + """ + Run all actions for a cube. 
+ + This is the top-level "activation" function which runs all the appropriate + rules actions to translate facts and build all the cube elements. + + The specific cube being translated is "engine.cube". + + """ + # default (all cubes) action, always runs action_default(engine) # This should run the default rules. @@ -400,7 +466,7 @@ def run_actions(engine): for dimcoord_fact in dimcoord_facts: action_provides_coordinate(engine, dimcoord_fact) - # build coordinates + # build (dimension) coordinates providescoord_facts = engine.fact_list("provides-coordinate-(oftype)") for providescoord_fact in providescoord_facts: action_build_dimension_coordinate(engine, providescoord_fact) @@ -409,3 +475,23 @@ def run_actions(engine): auxcoord_facts = engine.fact_list("auxiliary_coordinate") for auxcoord_fact in auxcoord_facts: action_build_auxiliary_coordinate(engine, auxcoord_fact) + + # Detect + process and special 'ukmo' attributes + # Run on every cube : they choose themselves whether to trigger. + action_ukmo_stash(engine) + action_ukmo_processflags(engine) + + # cell measures + cellm_facts = engine.fact_list("cell_measure") + for cellm_fact in cellm_facts: + action_build_cell_measure(engine, cellm_fact) + + # ancillary variables + ancil_facts = engine.fact_list("ancillary_variable") + for ancil_fact in ancil_facts: + action_build_ancil_var(engine, ancil_fact) + + # label coords + label_facts = engine.fact_list("label") + for label_fact in label_facts: + action_build_label_coordinate(engine, label_fact) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py index 6946c2819e..c3329209ce 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py @@ -157,7 +157,7 @@ def load_single_cube(engine): # If requested, directly compare the pyke and non-pyke outputs. if self.compare_pyke_nonpyke: # Compare the loaded cubes from both engines. - print("\nPYKE-NONPYKE COMPARE") + # print("\nPYKE-NONPYKE COMPARE") # First zap cube-data, as masked data does not compare well. def unmask_cube(cube): From e805bc0c1b79fd56d343efd19c5eea7caca2fbc4 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Wed, 9 Jun 2021 23:29:49 +0100 Subject: [PATCH 32/35] Improved comments in actions routines. --- .../fileformats/_nc_load_rules/actions.py | 54 +++++++++++-------- 1 file changed, 32 insertions(+), 22 deletions(-) diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index 97141c4e82..227fea587e 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -27,7 +27,7 @@ other rule-type logic. Each 'action' function can replace several similar 'rules'. -E.G. 'action_provides_grid_mapping' replaces all 'fc_provides_grid+mapping_'. +E.G. 'action_provides_grid_mapping' replaces all 'fc_provides_grid_mapping_'. 
To aid debug, each returns a 'rule_name' string, indicating which original rule this particular action call is emulating : In some cases, this may include a textual note that this rule 'failed', aka "did not trigger", which would not be @@ -82,6 +82,7 @@ def inner(engine, *args, **kwargs): @action_function def action_default(engine): + """Standard operations for every cube.""" hh.build_cube_metadata(engine) @@ -90,7 +91,7 @@ def action_default(engine): # routines: # (@0) a validity-checker (or None) # (@1) a coord-system builder function. -_grid_types_to_checker_builder = { +_GRIDTYPE_CHECKER_AND_BUILDER = { hh.CF_GRID_MAPPING_LAT_LON: (None, hh.build_coordinate_system), hh.CF_GRID_MAPPING_ROTATED_LAT_LON: ( None, @@ -133,29 +134,27 @@ def action_default(engine): @action_function def action_provides_grid_mapping(engine, gridmapping_fact): + """Convert a CFGridMappingVariable into a cube coord-system.""" (var_name,) = gridmapping_fact rule_name = "fc_provides_grid_mapping" cf_var = engine.cf_var.cf_group[var_name] grid_mapping_type = getattr(cf_var, hh.CF_ATTR_GRID_MAPPING_NAME, None) + succeed = True if grid_mapping_type is None: succeed = False rule_name += " --FAILED(no grid-mapping attr)" else: grid_mapping_type = grid_mapping_type.lower() + if succeed: - if grid_mapping_type in _grid_types_to_checker_builder: - checker, builder = _grid_types_to_checker_builder[ - grid_mapping_type - ] + if grid_mapping_type in _GRIDTYPE_CHECKER_AND_BUILDER: + checker, builder = _GRIDTYPE_CHECKER_AND_BUILDER[grid_mapping_type] rule_name += f"_({grid_mapping_type})" else: succeed = False rule_name += f" --FAILED(unhandled type {grid_mapping_type})" - # We DON'T call this, as we already identified the type in the call. - # if succeed and not is_grid_mapping(engine, var_name, grid_mapping_type): - # succeed = False - # rule_name += f' --(FAILED is_grid_mapping)' + if succeed: if checker is not None and not checker(engine, var_name): succeed = False @@ -172,6 +171,7 @@ def action_provides_grid_mapping(engine, gridmapping_fact): f" --(FAILED overwrite coord-sytem " f"{old_gridtype} with {grid_mapping_type})" ) + if succeed: engine.cube_parts["coordinate_system"] = coordinate_system engine.add_fact("grid-type", (grid_mapping_type,)) @@ -181,6 +181,7 @@ def action_provides_grid_mapping(engine, gridmapping_fact): @action_function def action_provides_coordinate(engine, dimcoord_fact): + """Identify the coordinate 'type' of a CFCoordinateVariable.""" (var_name,) = dimcoord_fact # Identify the "type" of a coordinate variable @@ -224,12 +225,12 @@ def action_provides_coordinate(engine, dimcoord_fact): # Lookup table used by 'action_build_dimension_coordinate'. # Maps each supported coordinate-type name (a rules-internal concept) to a pair # of information values : -# (@0) the CF grid_mapping_name (or None) +# (@0) A grid "type", one of latlon/rotated/projected (or None) # If set, the cube should have a coord-system, which is set on the # resulting coordinate. If None, the coord has no coord_system. 
# (@1) an (optional) fixed standard-name for the coordinate, or None # If None, the coordinate name is copied from the source variable -_coordtype_to_gridtype_coordname = { +_COORDTYPE_GRIDTYPES_AND_COORDNAMES = { "latitude": ("latlon", hh.CF_VALUE_STD_NAME_LAT), "longitude": ("latlon", hh.CF_VALUE_STD_NAME_LON), "rotated_latitude": ( @@ -250,10 +251,13 @@ def action_provides_coordinate(engine, dimcoord_fact): @action_function def action_build_dimension_coordinate(engine, providescoord_fact): + """Convert a CFCoordinateVariable into a cube dim-coord.""" coord_type, var_name = providescoord_fact cf_var = engine.cf_var.cf_group[var_name] rule_name = f"fc_build_coordinate_({coord_type})" - coord_grid_class, coord_name = _coordtype_to_gridtype_coordname[coord_type] + coord_grid_class, coord_name = _COORDTYPE_GRIDTYPES_AND_COORDNAMES[ + coord_type + ] if coord_grid_class is None: # Coordinates not identified with a specific grid-type class (latlon, # rotated or projected) are always built, but can have no coord-system. @@ -275,10 +279,11 @@ def action_build_dimension_coordinate(engine, providescoord_fact): succeed = True cs_gridclass = None else: + # Get a grid-class from the grid-type + # i.e. one of latlon/rotated/projected, as for coord_grid_class. gridtypes_factlist = engine.fact_list("grid-type") (gridtypes_fact,) = gridtypes_factlist # only 1 fact (cs_gridtype,) = gridtypes_fact # fact contains 1 term - # (i.e. one of latlon/rotated/prjected, like coord_grid_class) if cs_gridtype == "latitude_longitude": cs_gridclass = "latlon" elif cs_gridtype == "rotated_latitude_longitude": @@ -299,6 +304,7 @@ def action_build_dimension_coordinate(engine, providescoord_fact): elif cs_gridclass == "rotated": # We disallow this case succeed = False + rule_name += "(FAILED : latlon coord with rotated cs)" else: assert cs_gridclass == "projected" # succeed, no error, but discards the coord-system @@ -307,8 +313,6 @@ def action_build_dimension_coordinate(engine, providescoord_fact): coord_system = None rule_name += "(no-cs : discarded projected cs)" elif coord_grid_class == "rotated": - # For rotated, we also accept no coord-system, but do *not* accept - # the presence of an unsuitable type. if cs_gridclass == "rotated": succeed = True rule_name += "(rotated)" @@ -316,14 +320,14 @@ def action_build_dimension_coordinate(engine, providescoord_fact): succeed = True rule_name += "(rotated no-cs)" elif cs_gridclass == "latlon": - # We allow this, but discard the CS + # We disallow this case succeed = False - rule_name += "(FAILED rotated with latlon-cs)" + rule_name += "(FAILED rotated coord with latlon cs)" else: assert cs_gridclass == "projected" succeed = True coord_system = None - rule_name += "(rotated : discarded projected cs)" + rule_name += "(rotated no-cs : discarded projected cs)" elif coord_grid_class == "projected": # In this case, can *only* build a coord at all if there is a # coord-system of the correct class (i.e. 'projected'). @@ -346,12 +350,12 @@ def action_build_dimension_coordinate(engine, providescoord_fact): @action_function def action_build_auxiliary_coordinate(engine, auxcoord_fact): + """Convert a CFAuxiliaryCoordinateVariable into a cube aux-coord.""" (var_name,) = auxcoord_fact rule_name = "fc_build_auxiliary_coordinate" - # FOR NOW: attempt to identify type - # TODO: eventually remove much of this, which only affects rule_name. 
- # (but could possibly retain for future debugging purposes) + # Identify any known coord "type" : latitude/longitude/time/time_period + # If latitude/longitude, this sets the standard_name of the built AuxCoord coord_type = "" # unidentified : can be OK coord_name = None if hh.is_time(engine, var_name): @@ -384,6 +388,7 @@ def action_build_auxiliary_coordinate(engine, auxcoord_fact): @action_function def action_ukmo_stash(engine): + """Convert 'ukmo stash' cf property into a cube attribute.""" rule_name = "fc_attribute_ukmo__um_stash_source" var = engine.cf_var attr_name = "ukmo__um_stash_source" @@ -402,6 +407,7 @@ def action_ukmo_stash(engine): @action_function def action_ukmo_processflags(engine): + """Convert 'ukmo process flags' cf property into a cube attribute.""" rule_name = "fc_attribute_ukmo__process_flags" var = engine.cf_var attr_name = "ukmo__process_flags" @@ -418,6 +424,7 @@ def action_ukmo_processflags(engine): @action_function def action_build_cell_measure(engine, cellm_fact): + """Convert a CFCellMeasureVariable into a cube cell-measure.""" (var_name,) = cellm_fact var = engine.cf_var.cf_group.cell_measures[var_name] hh.build_cell_measures(engine, var) @@ -425,6 +432,7 @@ def action_build_cell_measure(engine, cellm_fact): @action_function def action_build_ancil_var(engine, ancil_fact): + """Convert a CFAncillaryVariable into a cube ancil-var.""" (var_name,) = ancil_fact var = engine.cf_var.cf_group.ancillary_variables[var_name] hh.build_ancil_var(engine, var) @@ -432,6 +440,7 @@ def action_build_ancil_var(engine, ancil_fact): @action_function def action_build_label_coordinate(engine, label_fact): + """Convert a CFLabelVariable into a cube string-type aux-coord.""" (var_name,) = label_fact var = engine.cf_var.cf_group.labels[var_name] hh.build_auxiliary_coordinate(engine, var) @@ -467,6 +476,7 @@ def run_actions(engine): action_provides_coordinate(engine, dimcoord_fact) # build (dimension) coordinates + # The 'provides' step and the grid-mapping must have already been done. providescoord_facts = engine.fact_list("provides-coordinate-(oftype)") for providescoord_fact in providescoord_facts: action_build_dimension_coordinate(engine, providescoord_fact) From 3e1575087884cb9e83c85758f30d0de4a735d938 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Thu, 10 Jun 2021 17:54:20 +0100 Subject: [PATCH 33/35] Added actions for formulae (aka hybrid coords, factories). --- .../fileformats/_nc_load_rules/actions.py | 56 ++++++++++++++++++- 1 file changed, 55 insertions(+), 1 deletion(-) diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index 227fea587e..568e01f84d 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -45,9 +45,12 @@ """ from . import helpers as hh -import iris.fileformats.pp as pp + from functools import wraps +import iris.fileformats.pp as pp +import iris.fileformats.cf + def _default_rulenamesfunc(func_name): # A simple default function to deduce the rules-name from an action-name. @@ -446,6 +449,46 @@ def action_build_label_coordinate(engine, label_fact): hh.build_auxiliary_coordinate(engine, var) +@action_function +def action_formula_type(engine, formula_root_fact): + """Register a CFVariable as a formula root.""" + rule_name = "fc_formula_type" + (var_name,) = formula_root_fact + cf_var = engine.cf_var.cf_group[var_name] + # var.standard_name is a formula type (or we should never get here). 
+ formula_type = getattr(cf_var, "standard_name", None) + succeed = True + if formula_type not in iris.fileformats.cf.reference_terms: + succeed = False + rule_name += f"(FAILED - unrecognised formula type = {formula_type!r})" + if succeed: + # Check we don't already have one. + existing_type = engine.requires.get("formula_type") + if existing_type: + succeed = False + rule_name += ( + f"(FAILED - new formula type ={formula_type!r} " + f"collided with existing one ={existing_type!r}.)" + ) + if succeed: + rule_name += f"_{formula_type}" + # Set 'requires' info for iris.fileformats.netcdf._load_aux_factory. + engine.requires["formula_type"] = formula_type + + return rule_name + + +@action_function +def action_formula_term(engine, formula_term_fact): + """Register a CFVariable as a formula term.""" + # Must run AFTER formula root identification. + (termvar_name, rootvar_name, term_name) = formula_term_fact + # The rootname is implicit : have only one per cube + # TODO: change when we adopt cf-1.7 advanced grid-mping syntax + engine.requires.setdefault("formula_terms", {})[term_name] = termvar_name + rule_name = f"fc_formula_term({term_name})" + + def run_actions(engine): """ Run all actions for a cube. @@ -505,3 +548,14 @@ def run_actions(engine): label_facts = engine.fact_list("label") for label_fact in label_facts: action_build_label_coordinate(engine, label_fact) + + # formula root variables + formula_root_facts = engine.fact_list("formula_root") + for root_fact in formula_root_facts: + action_formula_type(engine, root_fact) + + # formula terms + # The 'formula_root's must have already been done. + formula_term_facts = engine.fact_list("formula_term") + for term_fact in formula_term_facts: + action_formula_term(engine, term_fact) From 7c206eee6f178ad3613d6c08aceb520dee0d9d17 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Thu, 10 Jun 2021 21:56:23 +0100 Subject: [PATCH 34/35] Tiny fix. --- lib/iris/fileformats/_nc_load_rules/actions.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/iris/fileformats/_nc_load_rules/actions.py b/lib/iris/fileformats/_nc_load_rules/actions.py index 568e01f84d..4676389cb7 100644 --- a/lib/iris/fileformats/_nc_load_rules/actions.py +++ b/lib/iris/fileformats/_nc_load_rules/actions.py @@ -487,6 +487,7 @@ def action_formula_term(engine, formula_term_fact): # TODO: change when we adopt cf-1.7 advanced grid-mping syntax engine.requires.setdefault("formula_terms", {})[term_name] = termvar_name rule_name = f"fc_formula_term({term_name})" + return rule_name def run_actions(engine): From dd726307cfe9cc4c746e55a558060ed2cb4cd817 Mon Sep 17 00:00:00 2001 From: Patrick Peglar Date: Fri, 11 Jun 2021 00:34:01 +0100 Subject: [PATCH 35/35] Temporary hacks to make all netcdf loading non-pyke, and check pyke/nonpyke equivalence. --- lib/iris/fileformats/netcdf.py | 71 ++++++++++++++++--- .../load_cube/load_cube__activate/__init__.py | 15 +++- 2 files changed, 72 insertions(+), 14 deletions(-) diff --git a/lib/iris/fileformats/netcdf.py b/lib/iris/fileformats/netcdf.py index da7a1ae451..37e6f39b41 100644 --- a/lib/iris/fileformats/netcdf.py +++ b/lib/iris/fileformats/netcdf.py @@ -415,12 +415,13 @@ def _pyke_kb_engine_real(): return engine -LOAD_PYKE = True +LOAD_PYKE = False +CHECK_BOTH = True -def _pyke_kb_engine(): +def _pyke_kb_engine(use_pyke=True): """Return a knowledge engine, or replacement object.""" - if LOAD_PYKE: + if use_pyke: engine = _pyke_kb_engine_real() else: # Deferred import to avoid circularity. 
@@ -816,7 +817,10 @@ def load_cubes(filenames, callback=None): """ # Initialise the pyke inference engine. - engine = _pyke_kb_engine() + if LOAD_PYKE or CHECK_BOTH: + pyke_engine = _pyke_kb_engine(use_pyke=True) + if not LOAD_PYKE or CHECK_BOTH: + nonpyke_engine = _pyke_kb_engine(use_pyke=False) if isinstance(filenames, str): filenames = [filenames] @@ -830,14 +834,59 @@ def load_cubes(filenames, callback=None): cf.cf_group.promoted.values() ) for cf_var in data_variables: - cube = _load_cube(engine, cf, cf_var, filename) - # Process any associated formula terms and attach - # the corresponding AuxCoordFactory. - try: - _load_aux_factory(engine, cube) - except ValueError as e: - warnings.warn("{}".format(e)) + def one_cube(engine, show_warnings=True): + cube = _load_cube(engine, cf, cf_var, filename) + + # Process any associated formula terms and attach + # the corresponding AuxCoordFactory. + try: + _load_aux_factory(engine, cube) + except ValueError as e: + if show_warnings: + warnings.warn("{}".format(e)) + + return cube + + if LOAD_PYKE or CHECK_BOTH: + # Make pyke cube if needed, show warnings if "primary" cube + cube_pyke = one_cube(pyke_engine, show_warnings=LOAD_PYKE) + if not LOAD_PYKE or CHECK_BOTH: + # Make nonpyke cube if needed, show warnings if "primary" cube + cube_nonpyke = one_cube( + nonpyke_engine, show_warnings=not LOAD_PYKE + ) + + if not LOAD_PYKE or CHECK_BOTH: + msgs = [] + if not LOAD_PYKE: + msgs.append("NONPYKE-LOAD") + warnings.warn("((NONPYKE-LOAD))") + if CHECK_BOTH: + msgs.append("PYKE-SAMECHECK") + msg = f"(({': '.join(msgs)}))" + print(msg) + + if CHECK_BOTH: + + def unmask_cube(cube): + cube = cube.copy() + if isinstance(cube.data, np.ma.MaskedArray): + cube.data = cube.data.filled(0) + return cube + + if unmask_cube(cube_nonpyke) != unmask_cube(cube_pyke): + warnings.warn("PYKE-SAMECHECK : unmasked cubes == failed.") + full_test = cube_nonpyke.copy() == cube_pyke.copy() + if full_test not in (True, np.ma.masked): + # NOTE: this one can fail: non-bool answer if masked ? + warnings.warn("PYKE-SAMECHECK : copied cubes == failed.") + + # Select "primary" cube to return. + if LOAD_PYKE: + cube = cube_pyke + else: + cube = cube_nonpyke # Perform any user registered callback function. cube = iris.io.run_callback(callback, cube, cf_var, filename) diff --git a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py index c3329209ce..dd9f843783 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py +++ b/lib/iris/tests/unit/fileformats/netcdf/load_cube/load_cube__activate/__init__.py @@ -79,6 +79,8 @@ class Mixin__nc_load_actions: # whether to test 'rules' or 'actions' implementations # TODO: remove when Pyke is gone use_pyke = True + use_pyke_override = False + debug_confirm_which = True # whether to output various debug info # TODO: ?possibly? 
remove when development is complete @@ -121,8 +123,11 @@ def load_cube_from_cdl(self, cdl_string, cdl_path, nc_path): cf_var = list(cf.cf_group.data_variables.values())[0] cf_var = cf.cf_group.data_variables["phenom"] - do_pyke = self.use_pyke or self.compare_pyke_nonpyke - do_nonpyke = not self.use_pyke or self.compare_pyke_nonpyke + use_pyke = self.use_pyke + if self.use_pyke_override is not None: + use_pyke = self.use_pyke_override + do_pyke = use_pyke or self.compare_pyke_nonpyke + do_nonpyke = not use_pyke or self.compare_pyke_nonpyke if do_pyke: pyke_engine = iris.fileformats.netcdf._pyke_kb_engine_real() if do_nonpyke: @@ -196,9 +201,13 @@ def show_cube(cube): self.assertEqual(pyke_cube_copy, nonpyke_cube_copy) # Return the right thing, whether we did 'both' or not - if self.use_pyke: + if use_pyke: + if self.debug_confirm_which: + print("(PYKE_TESTED)") result_cube = pyke_cube else: + if self.debug_confirm_which: + print("(NONPYKE_TESTED)") result_cube = nonpyke_cube # Always returns a single cube.
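
A note on the `unmask_cube` helper used in the pyke/non-pyke equivalence checks above: the data is filled before comparing because equality on masked arrays is itself masked wherever either operand is masked, so the comparison does not reduce cleanly to True or False. A minimal sketch in plain numpy (illustrative only, not the Iris cube comparison itself):

    import numpy as np

    a = np.ma.masked_array([1.0, 2.0, 3.0], mask=[False, True, False])
    b = np.ma.masked_array([1.0, 9.0, 3.0], mask=[False, True, False])

    # The masked element propagates into the element-wise comparison.
    print(a == b)  # [True -- True]

    # Filling both sides with the same value gives an unambiguous answer.
    print(np.array_equal(a.filled(0), b.filled(0)))  # True

Filling with a fixed value does mean that any differences hidden under the mask are ignored, which is presumably acceptable here since both cubes are loaded from the same file and it is the rule-driven metadata, not the data payload, that is under test.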