diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/bugfix_2019-Nov-21_cell_measure_attributes.txt b/docs/iris/src/whatsnew/contributions_3.0.0/bugfix_2019-Nov-21_cell_measure_attributes.txt new file mode 100644 index 0000000000..577e193fe5 --- /dev/null +++ b/docs/iris/src/whatsnew/contributions_3.0.0/bugfix_2019-Nov-21_cell_measure_attributes.txt @@ -0,0 +1,2 @@ +* Fixed a bug where the attributes of cell measures in netcdf-CF files were discarded on + loading. They now appear on the CellMeasure in the loaded cube. diff --git a/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Nov-21_netcdf_ancillary_data.txt b/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Nov-21_netcdf_ancillary_data.txt new file mode 100644 index 0000000000..3d17ce3dd4 --- /dev/null +++ b/docs/iris/src/whatsnew/contributions_3.0.0/newfeature_2019-Nov-21_netcdf_ancillary_data.txt @@ -0,0 +1,2 @@ +* CF Ancillary Variables are now loaded from and saved to netcdf-CF files. + diff --git a/lib/iris/fileformats/_pyke_rules/fc_rules_cf.krb b/lib/iris/fileformats/_pyke_rules/fc_rules_cf.krb index 2afc823795..d41ec6aa3e 100644 --- a/lib/iris/fileformats/_pyke_rules/fc_rules_cf.krb +++ b/lib/iris/fileformats/_pyke_rules/fc_rules_cf.krb @@ -42,7 +42,7 @@ fc_provides_grid_mapping_rotated_latitude_longitude assert python cf_grid_var = engine.cf_var.cf_group.grid_mappings[$grid_mapping] python coordinate_system = build_rotated_coordinate_system(engine, cf_grid_var) - python engine.provides['coordinate_system'] = coordinate_system + python engine.cube_parts['coordinate_system'] = coordinate_system facts_cf.provides(coordinate_system, rotated_latitude_longitude) python engine.rule_triggered.add(rule.name) @@ -62,7 +62,7 @@ fc_provides_grid_mapping_latitude_longitude assert python cf_grid_var = engine.cf_var.cf_group.grid_mappings[$grid_mapping] python coordinate_system = build_coordinate_system(cf_grid_var) - python engine.provides['coordinate_system'] = coordinate_system + python engine.cube_parts['coordinate_system'] = coordinate_system facts_cf.provides(coordinate_system, latitude_longitude) python engine.rule_triggered.add(rule.name) @@ -81,7 +81,7 @@ fc_provides_grid_mapping_transverse_mercator assert python cf_grid_var = engine.cf_var.cf_group.grid_mappings[$grid_mapping] python coordinate_system = build_transverse_mercator_coordinate_system(engine, cf_grid_var) - python engine.provides['coordinate_system'] = coordinate_system + python engine.cube_parts['coordinate_system'] = coordinate_system facts_cf.provides(coordinate_system, transverse_mercator) python engine.rule_triggered.add(rule.name) @@ -102,7 +102,7 @@ fc_provides_grid_mapping_mercator assert python cf_grid_var = engine.cf_var.cf_group.grid_mappings[$grid_mapping] python coordinate_system = build_mercator_coordinate_system(engine, cf_grid_var) - python engine.provides['coordinate_system'] = coordinate_system + python engine.cube_parts['coordinate_system'] = coordinate_system facts_cf.provides(coordinate_system, mercator) python engine.rule_triggered.add(rule.name) @@ -123,7 +123,7 @@ fc_provides_grid_mapping_stereographic assert python cf_grid_var = engine.cf_var.cf_group.grid_mappings[$grid_mapping] python coordinate_system = build_stereographic_coordinate_system(engine, cf_grid_var) - python engine.provides['coordinate_system'] = coordinate_system + python engine.cube_parts['coordinate_system'] = coordinate_system facts_cf.provides(coordinate_system, stereographic) python engine.rule_triggered.add(rule.name) @@ -142,7 +142,7 @@ 
fc_provides_grid_mapping_lambert_conformal assert python cf_grid_var = engine.cf_var.cf_group.grid_mappings[$grid_mapping] python coordinate_system = build_lambert_conformal_coordinate_system(engine, cf_grid_var) - python engine.provides['coordinate_system'] = coordinate_system + python engine.cube_parts['coordinate_system'] = coordinate_system facts_cf.provides(coordinate_system, lambert_conformal) python engine.rule_triggered.add(rule.name) @@ -161,7 +161,7 @@ fc_provides_grid_mapping_lambert_azimuthal_equal_area assert python cf_grid_var = engine.cf_var.cf_group.grid_mappings[$grid_mapping] python coordinate_system = build_lambert_azimuthal_equal_area_coordinate_system(engine, cf_grid_var) - python engine.provides['coordinate_system'] = coordinate_system + python engine.cube_parts['coordinate_system'] = coordinate_system facts_cf.provides(coordinate_system, lambert_azimuthal_equal_area) python engine.rule_triggered.add(rule.name) @@ -180,7 +180,7 @@ fc_provides_grid_mapping_albers_equal_area assert python cf_grid_var = engine.cf_var.cf_group.grid_mappings[$grid_mapping] python coordinate_system = build_albers_equal_area_coordinate_system(engine, cf_grid_var) - python engine.provides['coordinate_system'] = coordinate_system + python engine.cube_parts['coordinate_system'] = coordinate_system facts_cf.provides(coordinate_system, albers_equal_area) python engine.rule_triggered.add(rule.name) @@ -200,7 +200,7 @@ fc_provides_grid_mapping_vertical_perspective python cf_grid_var = engine.cf_var.cf_group.grid_mappings[$grid_mapping] python coordinate_system = \ build_vertical_perspective_coordinate_system(engine, cf_grid_var) - python engine.provides['coordinate_system'] = coordinate_system + python engine.cube_parts['coordinate_system'] = coordinate_system facts_cf.provides(coordinate_system, vertical_perspective) python engine.rule_triggered.add(rule.name) @@ -221,7 +221,7 @@ fc_provides_grid_mapping_geostationary python cf_grid_var = engine.cf_var.cf_group.grid_mappings[$grid_mapping] python coordinate_system = \ build_geostationary_coordinate_system(engine, cf_grid_var) - python engine.provides['coordinate_system'] = coordinate_system + python engine.cube_parts['coordinate_system'] = coordinate_system facts_cf.provides(coordinate_system, geostationary) python engine.rule_triggered.add(rule.name) @@ -498,6 +498,22 @@ fc_build_cell_measure python engine.rule_triggered.add(rule.name) +# +# Context: +# This rule will trigger for each ancillary_variable case specific fact. +# +# Purpose: +# Add the ancillary variable to the cube. 
+# +fc_build_ancil_var + foreach + facts_cf.ancillary_variable($var) + assert + python ancil_var = engine.cf_var.cf_group.ancillary_variables[$var] + python build_ancil_var(engine, ancil_var) + python engine.rule_triggered.add(rule.name) + + # # Context: # This rule will trigger iff a CF latitude coordinate exists and @@ -515,7 +531,7 @@ fc_build_coordinate_latitude python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] python build_dimension_coordinate(engine, cf_coord_var, coord_name=CF_VALUE_STD_NAME_LAT, - coord_system=engine.provides['coordinate_system']) + coord_system=engine.cube_parts['coordinate_system']) python engine.rule_triggered.add(rule.name) @@ -536,7 +552,7 @@ fc_build_coordinate_latitude_rotated python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] python build_dimension_coordinate(engine, cf_coord_var, coord_name=CF_VALUE_STD_NAME_GRID_LAT, - coord_system=engine.provides['coordinate_system']) + coord_system=engine.cube_parts['coordinate_system']) python engine.rule_triggered.add(rule.name) @@ -557,7 +573,7 @@ fc_build_coordinate_longitude python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] python build_dimension_coordinate(engine, cf_coord_var, coord_name=CF_VALUE_STD_NAME_LON, - coord_system=engine.provides['coordinate_system']) + coord_system=engine.cube_parts['coordinate_system']) python engine.rule_triggered.add(rule.name) @@ -578,7 +594,7 @@ fc_build_coordinate_longitude_rotated python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] python build_dimension_coordinate(engine, cf_coord_var, coord_name=CF_VALUE_STD_NAME_GRID_LON, - coord_system=engine.provides['coordinate_system']) + coord_system=engine.cube_parts['coordinate_system']) python engine.rule_triggered.add(rule.name) @@ -644,7 +660,7 @@ fc_build_coordinate_projection_x_transverse_mercator python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] python build_dimension_coordinate(engine, cf_coord_var, coord_name=CF_VALUE_STD_NAME_PROJ_X, - coord_system=engine.provides['coordinate_system']) + coord_system=engine.cube_parts['coordinate_system']) python engine.rule_triggered.add(rule.name) @@ -664,7 +680,7 @@ fc_build_coordinate_projection_y_transverse_mercator python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] python build_dimension_coordinate(engine, cf_coord_var, coord_name=CF_VALUE_STD_NAME_PROJ_Y, - coord_system=engine.provides['coordinate_system']) + coord_system=engine.cube_parts['coordinate_system']) python engine.rule_triggered.add(rule.name) # @@ -683,7 +699,7 @@ fc_build_coordinate_projection_x_lambert_conformal python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] python build_dimension_coordinate(engine, cf_coord_var, coord_name=CF_VALUE_STD_NAME_PROJ_X, - coord_system=engine.provides['coordinate_system']) + coord_system=engine.cube_parts['coordinate_system']) python engine.rule_triggered.add(rule.name) @@ -703,7 +719,7 @@ fc_build_coordinate_projection_y_lambert_conformal python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] python build_dimension_coordinate(engine, cf_coord_var, coord_name=CF_VALUE_STD_NAME_PROJ_Y, - coord_system=engine.provides['coordinate_system']) + coord_system=engine.cube_parts['coordinate_system']) python engine.rule_triggered.add(rule.name) @@ -723,7 +739,7 @@ fc_build_coordinate_projection_x_mercator python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] python build_dimension_coordinate(engine, cf_coord_var, 
coord_name=CF_VALUE_STD_NAME_PROJ_X, - coord_system=engine.provides['coordinate_system']) + coord_system=engine.cube_parts['coordinate_system']) python engine.rule_triggered.add(rule.name) # @@ -742,7 +758,7 @@ fc_build_coordinate_projection_y_mercator python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] python build_dimension_coordinate(engine, cf_coord_var, coord_name=CF_VALUE_STD_NAME_PROJ_Y, - coord_system=engine.provides['coordinate_system']) + coord_system=engine.cube_parts['coordinate_system']) python engine.rule_triggered.add(rule.name) # @@ -761,7 +777,7 @@ fc_build_coordinate_projection_x_stereographic python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] python build_dimension_coordinate(engine, cf_coord_var, coord_name=CF_VALUE_STD_NAME_PROJ_X, - coord_system=engine.provides['coordinate_system']) + coord_system=engine.cube_parts['coordinate_system']) python engine.rule_triggered.add(rule.name) # @@ -780,7 +796,7 @@ fc_build_coordinate_projection_y_stereographic python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] python build_dimension_coordinate(engine, cf_coord_var, coord_name=CF_VALUE_STD_NAME_PROJ_Y, - coord_system=engine.provides['coordinate_system']) + coord_system=engine.cube_parts['coordinate_system']) python engine.rule_triggered.add(rule.name) @@ -800,7 +816,7 @@ fc_build_coordinate_projection_x_lambert_azimuthal_equal_area python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] python build_dimension_coordinate(engine, cf_coord_var, coord_name=CF_VALUE_STD_NAME_PROJ_X, - coord_system=engine.provides['coordinate_system']) + coord_system=engine.cube_parts['coordinate_system']) python engine.rule_triggered.add(rule.name) @@ -820,7 +836,7 @@ fc_build_coordinate_projection_y_lambert_azimuthal_equal_area python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] python build_dimension_coordinate(engine, cf_coord_var, coord_name=CF_VALUE_STD_NAME_PROJ_Y, - coord_system=engine.provides['coordinate_system']) + coord_system=engine.cube_parts['coordinate_system']) python engine.rule_triggered.add(rule.name) # @@ -839,7 +855,7 @@ fc_build_coordinate_projection_x_albers_equal_area python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] python build_dimension_coordinate(engine, cf_coord_var, coord_name=CF_VALUE_STD_NAME_PROJ_X, - coord_system=engine.provides['coordinate_system']) + coord_system=engine.cube_parts['coordinate_system']) python engine.rule_triggered.add(rule.name) @@ -859,7 +875,7 @@ fc_build_coordinate_projection_y_albers_equal_area python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] python build_dimension_coordinate(engine, cf_coord_var, coord_name=CF_VALUE_STD_NAME_PROJ_Y, - coord_system=engine.provides['coordinate_system']) + coord_system=engine.cube_parts['coordinate_system']) python engine.rule_triggered.add(rule.name) # @@ -878,7 +894,7 @@ fc_build_coordinate_projection_x_vertical_perspective python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] python build_dimension_coordinate(engine, cf_coord_var, coord_name=CF_VALUE_STD_NAME_PROJ_X, - coord_system=engine.provides['coordinate_system']) + coord_system=engine.cube_parts['coordinate_system']) python engine.rule_triggered.add(rule.name) @@ -898,7 +914,7 @@ fc_build_coordinate_projection_y_vertical_perspective python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] python build_dimension_coordinate(engine, cf_coord_var, coord_name=CF_VALUE_STD_NAME_PROJ_Y, - 
coord_system=engine.provides['coordinate_system']) + coord_system=engine.cube_parts['coordinate_system']) python engine.rule_triggered.add(rule.name) # @@ -917,7 +933,7 @@ fc_build_coordinate_projection_x_geostationary python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] python build_dimension_coordinate(engine, cf_coord_var, coord_name=CF_VALUE_STD_NAME_PROJ_X, - coord_system=engine.provides['coordinate_system']) + coord_system=engine.cube_parts['coordinate_system']) python engine.rule_triggered.add(rule.name) @@ -937,7 +953,7 @@ fc_build_coordinate_projection_y_geostationary python cf_coord_var = engine.cf_var.cf_group.coordinates[$coordinate] python build_dimension_coordinate(engine, cf_coord_var, coord_name=CF_VALUE_STD_NAME_PROJ_Y, - coord_system=engine.provides['coordinate_system']) + coord_system=engine.cube_parts['coordinate_system']) python engine.rule_triggered.add(rule.name) @@ -1675,6 +1691,9 @@ fc_extras if np.issubdtype(cf_var.dtype, np.str_): attr_units = NO_UNIT_STRING + if any(hasattr(cf_var.cf_data, name) for name in ("flag_values", "flag_masks", "flag_meanings")): + attr_units = cf_units._NO_UNIT_STRING + # Get any assoicated calendar for a time reference coordinate. if cf_units.as_unit(attr_units).is_time_reference(): attr_calendar = getattr(cf_var, CF_ATTR_CALENDAR, None) @@ -1878,7 +1897,7 @@ fc_extras cube.add_aux_coord(coord, data_dims) # Update the coordinate to CF-netCDF variable mapping. - engine.provides['coordinates'].append((coord, cf_coord_var.cf_name)) + engine.cube_parts['coordinates'].append((coord, cf_coord_var.cf_name)) ################################################################################ @@ -1943,25 +1962,26 @@ fc_extras # Add it to the cube cube.add_aux_coord(coord, data_dims) - # Update the coordinate to CF-netCDF variable mapping. - engine.provides['coordinates'].append((coord, cf_coord_var.cf_name)) + # Make a list with names, stored on the engine, so we can find them all later. + engine.cube_parts['coordinates'].append((coord, cf_coord_var.cf_name)) ################################################################################ - def build_cell_measures(engine, cf_cm_attr, coord_name=None): + def build_cell_measures(engine, cf_cm_var): """Create a CellMeasure instance and add it to the cube.""" cf_var = engine.cf_var cube = engine.cube attributes = {} # Get units - attr_units = get_attr_units(cf_cm_attr, attributes) + attr_units = get_attr_units(cf_cm_var, attributes) - data = _get_cf_var_data(cf_cm_attr, engine.filename) + # Get (lazy) content array + data = _get_cf_var_data(cf_cm_var, engine.filename) # Determine the name of the dimension/s shared between the CF-netCDF data variable # and the coordinate being built. - common_dims = [dim for dim in cf_cm_attr.dimensions + common_dims = [dim for dim in cf_cm_var.dimensions if dim in cf_var.dimensions] data_dims = None if common_dims: @@ -1969,10 +1989,10 @@ fc_extras data_dims = [cf_var.dimensions.index(dim) for dim in common_dims] # Determine the standard_name, long_name and var_name - standard_name, long_name, var_name = get_names(cf_cm_attr, coord_name, attributes) + standard_name, long_name, var_name = get_names(cf_cm_var, None, attributes) # Obtain the cf_measure. 
- measure = cf_cm_attr.cf_measure + measure = cf_cm_var.cf_measure # Create the CellMeasure cell_measure = iris.coords.CellMeasure(data, @@ -1986,6 +2006,51 @@ fc_extras # Add it to the cube cube.add_cell_measure(cell_measure, data_dims) + # Make a list with names, stored on the engine, so we can find them all later. + engine.cube_parts['cell_measures'].append((cell_measure, cf_cm_var.cf_name)) + + + + ################################################################################ + def build_ancil_var(engine, cf_av_var): + """Create an AncillaryVariable instance and add it to the cube.""" + cf_var = engine.cf_var + cube = engine.cube + attributes = {} + + # Get units + attr_units = get_attr_units(cf_av_var, attributes) + + # Get (lazy) content array + data = _get_cf_var_data(cf_av_var, engine.filename) + + # Determine the name of the dimension/s shared between the CF-netCDF data variable + # and the AV being built. + common_dims = [dim for dim in cf_av_var.dimensions + if dim in cf_var.dimensions] + data_dims = None + if common_dims: + # Calculate the offset of each common dimension. + data_dims = [cf_var.dimensions.index(dim) for dim in common_dims] + + # Determine the standard_name, long_name and var_name + standard_name, long_name, var_name = get_names(cf_av_var, None, attributes) + + # Create the AncillaryVariable + av = iris.coords.AncillaryVariable( + data, + standard_name=standard_name, + long_name=long_name, + var_name=var_name, + units=attr_units, + attributes=attributes) + + # Add it to the cube + cube.add_ancillary_variable(av, data_dims) + + # Make a list with names, stored on the engine, so we can find them all later. + engine.cube_parts['ancillary_variables'].append((av, cf_av_var.cf_name)) + ################################################################################ diff --git a/lib/iris/fileformats/netcdf.py b/lib/iris/fileformats/netcdf.py index f34dc45e72..993214baa4 100644 --- a/lib/iris/fileformats/netcdf.py +++ b/lib/iris/fileformats/netcdf.py @@ -460,7 +460,10 @@ def __setstate__(self, state): def _assert_case_specific_facts(engine, cf, cf_group): # Initialise pyke engine "provides" hooks. - engine.provides["coordinates"] = [] + # These are used to patch non-processed element attributes after rules activation. + engine.cube_parts["coordinates"] = [] + engine.cube_parts["cell_measures"] = [] + engine.cube_parts["ancillary_variables"] = [] # Assert facts for CF coordinates. for cf_name in cf_group.coordinates.keys(): @@ -480,6 +483,12 @@ def _assert_case_specific_facts(engine, cf, cf_group): _PYKE_FACT_BASE, "cell_measure", (cf_name,) ) + # Assert facts for CF ancillary variables. + for cf_name in cf_group.ancillary_variables.keys(): + engine.add_case_specific_fact( + _PYKE_FACT_BASE, "ancillary_variable", (cf_name,) + ) + # Assert facts for CF grid_mappings. for cf_name in cf_group.grid_mappings.keys(): engine.add_case_specific_fact( @@ -587,7 +596,7 @@ def _load_cube(engine, cf, cf_var, filename): # Initialise pyke engine rule processing hooks. engine.cf_var = cf_var engine.cube = cube - engine.provides = {} + engine.cube_parts = {} engine.requires = {} engine.rule_triggered = set() engine.filename = filename @@ -598,31 +607,38 @@ def _load_cube(engine, cf, cf_var, filename): # Run pyke inference engine with forward chaining rules. engine.activate(_PYKE_RULE_BASE) - # Populate coordinate attributes with the untouched attributes from the - # associated CF-netCDF variable. 
- coordinates = engine.provides.get("coordinates", []) - + # Having run the rules, now populate the attributes of all the cf elements with the + # "unused" attributes from the associated CF-netCDF variable. + # That is, all those that aren't CF reserved terms. def attribute_predicate(item): return item[0] not in _CF_ATTRS - for coord, cf_var_name in coordinates: - tmpvar = filter( - attribute_predicate, cf.cf_group[cf_var_name].cf_attrs_unused() - ) + def add_unused_attributes(iris_object, cf_var): + tmpvar = filter(attribute_predicate, cf_var.cf_attrs_unused()) for attr_name, attr_value in tmpvar: - _set_attributes(coord.attributes, attr_name, attr_value) + _set_attributes(iris_object.attributes, attr_name, attr_value) + + def fix_attributes_all_elements(role_name): + elements_and_names = engine.cube_parts.get(role_name, []) + + for iris_object, cf_var_name in elements_and_names: + add_unused_attributes(iris_object, cf.cf_group[cf_var_name]) + + # Populate the attributes of all coordinates, cell-measures and ancillary-vars. + fix_attributes_all_elements("coordinates") + fix_attributes_all_elements("ancillary_variables") + fix_attributes_all_elements("cell_measures") - tmpvar = filter(attribute_predicate, cf_var.cf_attrs_unused()) - # Attach untouched attributes of the associated CF-netCDF data variable to - # the cube. - for attr_name, attr_value in tmpvar: - _set_attributes(cube.attributes, attr_name, attr_value) + # Also populate attributes of the top-level cube itself. + add_unused_attributes(cube, cf_var) + # Work out reference names for all the coords. names = { coord.var_name: coord.standard_name or coord.var_name or "unknown" for coord in cube.coords() } + # Add all the cube cell methods. cube.cell_methods = [ iris.coords.CellMethod( method=method.method, @@ -662,7 +678,7 @@ def coord_from_term(term): # Convert term names to coordinates (via netCDF variable names). name = engine.requires["formula_terms"].get(term, None) if name is not None: - for coord, cf_var_name in engine.provides["coordinates"]: + for coord, cf_var_name in engine.cube_parts["coordinates"]: if cf_var_name == name: return coord warnings.warn( diff --git a/lib/iris/tests/test_netcdf.py b/lib/iris/tests/test_netcdf.py index c69a83edd5..b012f6a89a 100644 --- a/lib/iris/tests/test_netcdf.py +++ b/lib/iris/tests/test_netcdf.py @@ -32,6 +32,7 @@ from iris.fileformats.netcdf import load_cubes as nc_load_cubes import iris.std_names import iris.util +from iris.coords import AncillaryVariable, CellMeasure import iris.coord_systems as icoord_systems import iris.tests.stock as stock from iris._lazy_data import is_lazy_data @@ -39,6 +40,13 @@ @tests.skip_data class TestNetCDFLoad(tests.IrisTest): + def setUp(self): + self.tmpdir = None + + def tearDown(self): + if self.tmpdir is not None: + shutil.rmtree(self.tmpdir) + def test_monotonic(self): cubes = iris.load( tests.get_data_path( @@ -243,6 +251,153 @@ def test_cell_methods(self): self.assertCML(cubes, ("netcdf", "netcdf_cell_methods.cml")) + def test_ancillary_variables(self): + # Note: using a CDL string as a test data reference, rather than a binary file. 
+ ref_cdl = """ + netcdf cm_attr { + dimensions: + axv = 3 ; + variables: + int64 qqv(axv) ; + qqv:long_name = "qq" ; + qqv:units = "1" ; + qqv:ancillary_variables = "my_av" ; + int64 axv(axv) ; + axv:units = "1" ; + axv:long_name = "x" ; + double my_av(axv) ; + my_av:units = "1" ; + my_av:long_name = "refs" ; + my_av:custom = "extra-attribute"; + data: + axv = 1, 2, 3; + my_av = 11., 12., 13.; + } + """ + self.tmpdir = tempfile.mkdtemp() + cdl_path = os.path.join(self.tmpdir, "tst.cdl") + nc_path = os.path.join(self.tmpdir, "tst.nc") + # Write CDL string into a temporary CDL file. + with open(cdl_path, "w") as f_out: + f_out.write(ref_cdl) + # Use ncgen to convert this into an actual (temporary) netCDF file. + command = "ncgen -o {} {}".format(nc_path, cdl_path) + check_call(command, shell=True) + # Load with iris.fileformats.netcdf.load_cubes, and check expected content. + cubes = list(nc_load_cubes(nc_path)) + self.assertEqual(len(cubes), 1) + avs = cubes[0].ancillary_variables() + self.assertEqual(len(avs), 1) + expected = AncillaryVariable( + np.ma.array([11.0, 12.0, 13.0]), + long_name="refs", + var_name="my_av", + units="1", + attributes={"custom": "extra-attribute"}, + ) + self.assertEqual(avs[0], expected) + + def test_status_flags(self): + # Note: using a CDL string as a test data reference, rather than a binary file. + ref_cdl = """ + netcdf cm_attr { + dimensions: + axv = 3 ; + variables: + int64 qqv(axv) ; + qqv:long_name = "qq" ; + qqv:units = "1" ; + qqv:ancillary_variables = "my_av" ; + int64 axv(axv) ; + axv:units = "1" ; + axv:long_name = "x" ; + byte my_av(axv) ; + my_av:long_name = "qq status_flag" ; + my_av:flag_values = 1b, 2b ; + my_av:flag_meanings = "a b" ; + data: + axv = 11, 21, 31; + my_av = 1b, 1b, 2b; + } + """ + self.tmpdir = tempfile.mkdtemp() + cdl_path = os.path.join(self.tmpdir, "tst.cdl") + nc_path = os.path.join(self.tmpdir, "tst.nc") + # Write CDL string into a temporary CDL file. + with open(cdl_path, "w") as f_out: + f_out.write(ref_cdl) + # Use ncgen to convert this into an actual (temporary) netCDF file. + command = "ncgen -o {} {}".format(nc_path, cdl_path) + check_call(command, shell=True) + # Load with iris.fileformats.netcdf.load_cubes, and check expected content. + cubes = list(nc_load_cubes(nc_path)) + self.assertEqual(len(cubes), 1) + avs = cubes[0].ancillary_variables() + self.assertEqual(len(avs), 1) + expected = AncillaryVariable( + np.ma.array([1, 1, 2], dtype=np.int8), + long_name="qq status_flag", + var_name="my_av", + units="no_unit", + attributes={ + "flag_values": np.array([1, 2], dtype=np.int8), + "flag_meanings": "a b", + }, + ) + self.assertEqual(avs[0], expected) + + def test_cell_measures(self): + # Note: using a CDL string as a test data reference, rather than a binary file. + ref_cdl = """ + netcdf cm_attr { + dimensions: + axv = 3 ; + ayv = 2 ; + variables: + int64 qqv(ayv, axv) ; + qqv:long_name = "qq" ; + qqv:units = "1" ; + qqv:cell_measures = "area: my_areas" ; + int64 ayv(ayv) ; + ayv:units = "1" ; + ayv:long_name = "y" ; + int64 axv(axv) ; + axv:units = "1" ; + axv:long_name = "x" ; + double my_areas(ayv, axv) ; + my_areas:units = "m2" ; + my_areas:long_name = "standardised cell areas" ; + my_areas:custom = "extra-attribute"; + data: + axv = 11, 12, 13; + ayv = 21, 22; + my_areas = 110., 120., 130., 221., 231., 241.; + } + """ + self.tmpdir = tempfile.mkdtemp() + cdl_path = os.path.join(self.tmpdir, "tst.cdl") + nc_path = os.path.join(self.tmpdir, "tst.nc") + # Write CDL string into a temporary CDL file. 
+ with open(cdl_path, "w") as f_out: + f_out.write(ref_cdl) + # Use ncgen to convert this into an actual (temporary) netCDF file. + command = "ncgen -o {} {}".format(nc_path, cdl_path) + check_call(command, shell=True) + # Load with iris.fileformats.netcdf.load_cubes, and check expected content. + cubes = list(nc_load_cubes(nc_path)) + self.assertEqual(len(cubes), 1) + cms = cubes[0].cell_measures() + self.assertEqual(len(cms), 1) + expected = CellMeasure( + np.ma.array([[110.0, 120.0, 130.0], [221.0, 231.0, 241.0]]), + measure="area", + var_name="my_areas", + long_name="standardised cell areas", + units="m2", + attributes={"custom": "extra-attribute"}, + ) + self.assertEqual(cms[0], expected) + def test_deferred_loading(self): # Test exercising CF-netCDF deferred loading and deferred slicing. # shape (31, 161, 320) diff --git a/lib/iris/tests/unit/fileformats/netcdf/test__load_aux_factory.py b/lib/iris/tests/unit/fileformats/netcdf/test__load_aux_factory.py index 609f7d097a..48cc9c0d1a 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test__load_aux_factory.py +++ b/lib/iris/tests/unit/fileformats/netcdf/test__load_aux_factory.py @@ -26,8 +26,10 @@ def setUp(self): self.ap = mock.MagicMock(units="units") self.ps = mock.MagicMock(units="units") coordinates = [(mock.sentinel.b, "b"), (self.ps, "ps")] - self.provides = dict(coordinates=coordinates) - self.engine = mock.Mock(requires=self.requires, provides=self.provides) + self.cube_parts = dict(coordinates=coordinates) + self.engine = mock.Mock( + requires=self.requires, cube_parts=self.cube_parts + ) self.cube = mock.create_autospec(Cube, spec_set=True, instance=True) # Patch out the check_dependencies functionality. func = "iris.aux_factory.HybridPressureFactory._check_dependencies" @@ -36,7 +38,7 @@ def setUp(self): self.addCleanup(patcher.stop) def test_formula_terms_ap(self): - self.provides["coordinates"].append((self.ap, "ap")) + self.cube_parts["coordinates"].append((self.ap, "ap")) self.requires["formula_terms"] = dict(ap="ap", b="b", ps="ps") _load_aux_factory(self.engine, self.cube) # Check cube.add_aux_coord method. @@ -59,7 +61,9 @@ def test_formula_terms_a_p0(self): long_name="vertical pressure", var_name="ap", ) - self.provides["coordinates"].extend([(coord_a, "a"), (coord_p0, "p0")]) + self.cube_parts["coordinates"].extend( + [(coord_a, "a"), (coord_p0, "p0")] + ) self.requires["formula_terms"] = dict(a="a", b="b", ps="ps", p0="p0") _load_aux_factory(self.engine, self.cube) # Check cube.coord_dims method. 
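The renamed `engine.cube_parts` container is just a dict keyed by element role. As a rough, illustrative sketch (example values assumed here, not taken from any real file), its shape after the pyke rules have run looks like this — the real entries are produced by `build_dimension_coordinate`, `build_cell_measures` and `build_ancil_var`:

```python
# Illustrative sketch only: the approximate contents of engine.cube_parts
# after rule activation, as implied by the rules and build_* helpers above.
# The values are invented; real ones come from the CF-netCDF variables.
import numpy as np
from iris.coords import AncillaryVariable, CellMeasure, DimCoord

cube_parts = {
    # Set by the fc_provides_grid_mapping_* rules when a grid mapping exists
    # (None here is just a placeholder for this sketch).
    "coordinate_system": None,
    # Each list pairs the built Iris object with its CF-netCDF variable name,
    # so _load_cube can later copy across the "unused" netCDF attributes.
    "coordinates": [
        (DimCoord(np.arange(3), long_name="x"), "axv"),
    ],
    "cell_measures": [
        (CellMeasure(np.ones((2, 3)), measure="area", var_name="my_areas",
                     units="m2"), "my_areas"),
    ],
    "ancillary_variables": [
        (AncillaryVariable(np.arange(3.0), var_name="my_av", units="1"),
         "my_av"),
    ],
}
```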
@@ -84,7 +88,7 @@ def test_formula_terms_a_p0(self): def test_formula_terms_p0_non_scalar(self): coord_p0 = DimCoord(np.arange(5)) - self.provides["coordinates"].append((coord_p0, "p0")) + self.cube_parts["coordinates"].append((coord_p0, "p0")) self.requires["formula_terms"] = dict(p0="p0") with self.assertRaises(ValueError): _load_aux_factory(self.engine, self.cube) @@ -92,7 +96,9 @@ def test_formula_terms_p0_non_scalar(self): def test_formula_terms_p0_bounded(self): coord_a = DimCoord(np.arange(5)) coord_p0 = DimCoord(1, bounds=[0, 2], var_name="p0") - self.provides["coordinates"].extend([(coord_a, "a"), (coord_p0, "p0")]) + self.cube_parts["coordinates"].extend( + [(coord_a, "a"), (coord_p0, "p0")] + ) self.requires["formula_terms"] = dict(a="a", b="b", ps="ps", p0="p0") with warnings.catch_warnings(record=True) as warn: warnings.simplefilter("always") @@ -133,14 +139,14 @@ def test_formula_terms_no_delta_terms(self): def test_formula_terms_no_p0_term(self): coord_a = DimCoord(np.arange(5), units="Pa") - self.provides["coordinates"].append((coord_a, "a")) + self.cube_parts["coordinates"].append((coord_a, "a")) self.requires["formula_terms"] = dict(a="a", b="b", ps="ps") _load_aux_factory(self.engine, self.cube) self._check_no_delta() def test_formula_terms_no_a_term(self): coord_p0 = DimCoord(10, units="1") - self.provides["coordinates"].append((coord_p0, "p0")) + self.cube_parts["coordinates"].append((coord_p0, "p0")) self.requires["formula_terms"] = dict(a="p0", b="b", ps="ps") _load_aux_factory(self.engine, self.cube) self._check_no_delta() diff --git a/lib/iris/tests/unit/fileformats/netcdf/test__load_cube.py b/lib/iris/tests/unit/fileformats/netcdf/test__load_cube.py index ffe48d437d..b4c93c45ab 100644 --- a/lib/iris/tests/unit/fileformats/netcdf/test__load_cube.py +++ b/lib/iris/tests/unit/fileformats/netcdf/test__load_cube.py @@ -25,7 +25,7 @@ def _patcher(engine, cf, cf_group): for coord in cf_group: engine.cube.add_aux_coord(coord) coordinates.append((coord, coord.name())) - engine.provides["coordinates"] = coordinates + engine.cube_parts["coordinates"] = coordinates def setUp(self): this = "iris.fileformats.netcdf._assert_case_specific_facts" diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_auxiliary_coordinate.py b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_auxiliary_coordinate.py index 8734d883cd..bd2dc9d6ee 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_auxiliary_coordinate.py +++ b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_auxiliary_coordinate.py @@ -63,7 +63,7 @@ def setUp(self): cf_var=mock.Mock(dimensions=('foo', 'bar'), cf_data=cf_data), filename='DUMMY', - provides=dict(coordinates=[])) + cube_parts=dict(coordinates=[])) # Patch the deferred loading that prevents attempted file access. # This assumes that self.cf_bounds_var is defined in the test case. 
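Several mocked `cf_data` objects in these tests gain `spec=[]`. That is needed because `get_attr_units` now probes `cf_var.cf_data` with `hasattr` for the CF flag attributes, and an unconstrained `Mock` answers every such probe positively. A minimal, standalone illustration of that behaviour:

```python
# Why the test mocks use cf_data=mock.Mock(spec=[]): the new
# flag_values/flag_masks/flag_meanings check in get_attr_units uses hasattr(),
# and a plain Mock auto-creates any attribute it is asked about.
from unittest import mock

plain = mock.Mock()
restricted = mock.Mock(spec=[])

assert hasattr(plain, "flag_values")           # True: attribute auto-created
assert not hasattr(restricted, "flag_values")  # False: empty spec disallows it
```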
@@ -91,7 +91,7 @@ def _get_per_test_bounds_var(_coord_unused): def _make_array_and_cf_data(cls, dimension_names): shape = tuple(cls.dim_names_lens[name] for name in dimension_names) - cf_data = mock.Mock(_FillValue=None) + cf_data = mock.MagicMock(_FillValue=None, spec=[]) cf_data.chunking = mock.MagicMock(return_value=shape) return np.zeros(shape), cf_data @@ -120,9 +120,9 @@ def _check_case(self, dimension_names): self.engine.cube.add_aux_coord.assert_called_with( self.expected_coord, [0, 1]) - # Test that engine.provides container is correctly populated. + # Test that engine.cube_parts container is correctly populated. expected_list = [(self.expected_coord, self.cf_coord_var.cf_name)] - self.assertEqual(self.engine.provides['coordinates'], + self.assertEqual(self.engine.cube_parts['coordinates'], expected_list) def test_fastest_varying_vertex_dim(self): @@ -144,7 +144,7 @@ class TestDtype(tests.IrisTest): def setUp(self): # Create coordinate cf variables and pyke engine. points = np.arange(6).reshape(2, 3) - cf_data = mock.Mock(_FillValue=None) + cf_data = mock.MagicMock(_FillValue=None) cf_data.chunking = mock.MagicMock(return_value=points.shape) self.cf_coord_var = mock.Mock( @@ -163,7 +163,7 @@ def setUp(self): cube=mock.Mock(), cf_var=mock.Mock(dimensions=('foo', 'bar')), filename='DUMMY', - provides=dict(coordinates=[])) + cube_parts=dict(coordinates=[])) def patched__getitem__(proxy_self, keys): if proxy_self.variable_name == self.cf_coord_var.cf_name: @@ -181,7 +181,7 @@ def test_scale_factor_add_offset_int(self): with self.deferred_load_patch: build_auxiliary_coordinate(self.engine, self.cf_coord_var) - coord, _ = self.engine.provides['coordinates'][0] + coord, _ = self.engine.cube_parts['coordinates'][0] self.assertEqual(coord.dtype.kind, 'i') def test_scale_factor_float(self): @@ -190,7 +190,7 @@ def test_scale_factor_float(self): with self.deferred_load_patch: build_auxiliary_coordinate(self.engine, self.cf_coord_var) - coord, _ = self.engine.provides['coordinates'][0] + coord, _ = self.engine.cube_parts['coordinates'][0] self.assertEqual(coord.dtype.kind, 'f') def test_add_offset_float(self): @@ -199,7 +199,7 @@ def test_add_offset_float(self): with self.deferred_load_patch: build_auxiliary_coordinate(self.engine, self.cf_coord_var) - coord, _ = self.engine.provides['coordinates'][0] + coord, _ = self.engine.cube_parts['coordinates'][0] self.assertEqual(coord.dtype.kind, 'f') @@ -210,7 +210,7 @@ def setUp(self): cube=mock.Mock(), cf_var=mock.Mock(dimensions=('foo', 'bar')), filename='DUMMY', - provides=dict(coordinates=[])) + cube_parts=dict(coordinates=[])) points = np.arange(6) self.cf_coord_var = mock.Mock( @@ -218,7 +218,7 @@ def setUp(self): scale_factor=1, add_offset=0, cf_name='wibble', - cf_data=mock.MagicMock(chunking=mock.Mock(return_value=None)), + cf_data=mock.MagicMock(chunking=mock.Mock(return_value=None), spec=[]), standard_name=None, long_name='wibble', units='days since 1970-01-01', diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_cube_metadata.py b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_cube_metadata.py index 7f6ecb27c2..fc31092b58 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_cube_metadata.py +++ b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_cube_metadata.py @@ -28,7 +28,7 @@ def _make_engine(global_attributes=None, standard_name=None, long_name=None): cf_group = 
mock.Mock(global_attributes=global_attributes) - cf_var = mock.Mock( + cf_var = mock.MagicMock( cf_name='wibble', standard_name=standard_name, long_name=long_name, diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_dimension_coordinate.py b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_dimension_coordinate.py index 1917034a6b..eea2051eb6 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_dimension_coordinate.py +++ b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_build_dimension_coordinate.py @@ -30,7 +30,7 @@ def setUp(self): cube=mock.Mock(), cf_var=mock.Mock(dimensions=('foo', 'bar')), filename='DUMMY', - provides=dict(coordinates=[])) + cube_parts=dict(coordinates=[])) # Create patch for deferred loading that prevents attempted # file access. This assumes that self.cf_coord_var and @@ -75,6 +75,7 @@ def _set_cf_coord_var(self, points): self.cf_coord_var = mock.Mock( dimensions=('foo',), cf_name='wibble', + cf_data=mock.Mock(spec=[]), standard_name=None, long_name='wibble', units='days since 1970-01-01', @@ -226,6 +227,7 @@ def setUp(self): cf_name='wibble', standard_name=None, long_name='wibble', + cf_data=mock.Mock(spec=[]), units='m', shape=points.shape, dtype=points.dtype, @@ -258,9 +260,9 @@ def test_slowest_varying_vertex_dim(self): self.engine.cube.add_dim_coord.assert_called_with( expected_coord, [0]) - # Test that engine.provides container is correctly populated. + # Test that engine.cube_parts container is correctly populated. expected_list = [(expected_coord, self.cf_coord_var.cf_name)] - self.assertEqual(self.engine.provides['coordinates'], + self.assertEqual(self.engine.cube_parts['coordinates'], expected_list) def test_fastest_varying_vertex_dim(self): @@ -286,9 +288,9 @@ def test_fastest_varying_vertex_dim(self): self.engine.cube.add_dim_coord.assert_called_with( expected_coord, [0]) - # Test that engine.provides container is correctly populated. + # Test that engine.cube_parts container is correctly populated. expected_list = [(expected_coord, self.cf_coord_var.cf_name)] - self.assertEqual(self.engine.provides['coordinates'], + self.assertEqual(self.engine.cube_parts['coordinates'], expected_list) def test_fastest_with_different_dim_names(self): @@ -317,9 +319,9 @@ def test_fastest_with_different_dim_names(self): self.engine.cube.add_dim_coord.assert_called_with( expected_coord, [0]) - # Test that engine.provides container is correctly populated. + # Test that engine.cube_parts container is correctly populated. 
expected_list = [(expected_coord, self.cf_coord_var.cf_name)] - self.assertEqual(self.engine.provides['coordinates'], + self.assertEqual(self.engine.cube_parts['coordinates'], expected_list) @@ -332,11 +334,12 @@ def setUp(self): def _make_vars(self, points, bounds=None, units='degrees'): points = np.array(points) - self.cf_coord_var = mock.Mock( + self.cf_coord_var = mock.MagicMock( dimensions=('foo',), cf_name='wibble', standard_name=None, long_name='wibble', + cf_data=mock.Mock(spec=[]), units=units, shape=points.shape, dtype=points.dtype, @@ -435,6 +438,7 @@ def _make_vars(self, bounds): standard_name=None, long_name='wibble', units='degrees', + cf_data=mock.Mock(spec=[]), shape=(), dtype=points.dtype, __getitem__=lambda self, key: points[key]) diff --git a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_get_attr_units.py b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_get_attr_units.py index b752de2370..a28768b413 100644 --- a/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_get_attr_units.py +++ b/lib/iris/tests/unit/fileformats/pyke_rules/compiled_krb/fc_rules_cf_fc/test_get_attr_units.py @@ -29,8 +29,9 @@ def _make_cf_var(global_attributes=None): cf_group = mock.Mock(global_attributes=global_attributes) - cf_var = mock.Mock( + cf_var = mock.MagicMock( cf_name='sound_frequency', + cf_data=mock.Mock(spec=[]), standard_name=None, long_name=None, units=u'\u266b',
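Finally, a short usage sketch of the behaviour these changes give end users. `example.nc` is a placeholder filename, and the save step leans on the new-feature note above rather than on code shown in this diff:

```python
# Usage sketch (assumed filenames): after loading, CF ancillary variables are
# attached to the cube, and cell-measure attributes are no longer discarded.
import iris

cube = iris.load_cube("example.nc")  # placeholder path

for av in cube.ancillary_variables():
    # e.g. a "status_flag" variable, with flag_values/flag_meanings preserved
    print(av.name(), av.units, av.attributes)

for cm in cube.cell_measures():
    # non-CF attributes, such as the test's "custom" one, now survive loading
    print(cm.measure, cm.name(), cm.attributes)

# Per the new-feature note, ancillary variables are also written out on save.
iris.save(cube, "example_copy.nc")
```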