diff --git a/lib/iris/etc/pp_save_rules.txt b/lib/iris/etc/pp_save_rules.txt
deleted file mode 100644
index 8d3183729e..0000000000
--- a/lib/iris/etc/pp_save_rules.txt
+++ /dev/null
@@ -1,784 +0,0 @@
-# (C) British Crown Copyright 2010 - 2017, Met Office
-#
-# This file is part of Iris.
-#
-# Iris is free software: you can redistribute it and/or modify it under
-# the terms of the GNU Lesser General Public License as published by the
-# Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Iris is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with Iris. If not, see .
-
-################################################################
-### stuff that's missing from the default pp, or always true ###
-################################################################
-
-IF
- True
-THEN
- pp.lbproc = 0 # Processing. Start at 0.
-
-IF
- cm.coord_system("GeogCS") is not None or cm.coord_system(None) is None
-THEN
- pp.bplat = 90
- pp.bplon = 0
-
-IF
- cm.coord_system("RotatedGeogCS") is not None
-THEN
- pp.bplat = cm.coord_system("RotatedGeogCS").grid_north_pole_latitude
- pp.bplon = cm.coord_system("RotatedGeogCS").grid_north_pole_longitude
-
-
-#UM - no version number
-IF
- not 'um_version' in cm.attributes
- 'source' in cm.attributes
- len(cm.attributes['source'].rsplit("Data from Met Office Unified Model", 1)) > 1
- len(cm.attributes['source'].rsplit("Data from Met Office Unified Model", 1)[1]) == 0
-THEN
- pp.lbsrce = 1111
-
-#UM - with version number
-IF
- not 'um_version' in cm.attributes
- 'source' in cm.attributes
- len(cm.attributes['source'].rsplit("Data from Met Office Unified Model", 1)) > 1
- len(cm.attributes['source'].rsplit("Data from Met Office Unified Model", 1)[1]) > 0
-THEN
- pp.lbsrce = int(float(cm.attributes['source'].rsplit("Data from Met Office Unified Model", 1)[1]) * 1000000) + 1111 # UM version
-
-#UM - from 'um_version' attribute
-IF
- 'um_version' in cm.attributes
-THEN
- pp.lbsrce = 1111 + 10000 * int(cm.attributes['um_version'].split('.')[1]) + 1000000 * int(cm.attributes['um_version'].split('.')[0])
-
-IF
- 'STASH' in cm.attributes
- isinstance(cm.attributes['STASH'], iris.fileformats.pp.STASH)
-THEN
- pp.lbuser[3] = 1000 * (cm.attributes['STASH'].section or 0) + (cm.attributes['STASH'].item or 0)
- pp.lbuser[6] = (cm.attributes['STASH'].model or 0)
-
-
-######################################################
-### time - lbtim, t1, t2 and lbft (but not lbproc) ###
-######################################################
-
-#no forecast
-IF
- scalar_coord(cm, 'time') is not None
- scalar_coord(cm, 'forecast_period') is None
- scalar_coord(cm, 'forecast_reference_time') is None
-THEN
- pp.lbtim.ia = 0
- pp.lbtim.ib = 0
- pp.t1 = scalar_coord(cm, 'time').units.num2date(scalar_coord(cm, 'time').points[0])
- pp.t2 = netcdftime.datetime(0, 0, 0)
-
-
-#forecast
-IF
- scalar_coord(cm, 'time') is not None
- not scalar_coord(cm, 'time').has_bounds()
- scalar_coord(cm, 'forecast_period') is not None
-THEN
- pp.lbtim.ia = 0
- pp.lbtim.ib = 1
- pp.t1 = scalar_coord(cm, 'time').units.num2date(scalar_coord(cm, 'time').points[0])
- pp.t2 = scalar_coord(cm, 'time').units.num2date(scalar_coord(cm, 'time').points[0] - scalar_coord(cm, 'forecast_period').points[0])
- pp.lbft = scalar_coord(cm, 'forecast_period').points[0]
-
-
-#time mean (non-climatological)
-# XXX This only works when we have a single timestep
-IF
- # XXX How do we know *which* time to use if there are more than
- # one? *Can* there be more than one?
- scalar_coord(cm, 'time') is not None
- scalar_coord(cm, 'time').has_bounds()
- scalar_coord(cm, 'clim_season') is None
- scalar_coord(cm, 'forecast_period') is not None
- scalar_coord(cm, 'forecast_period').has_bounds()
-THEN
- pp.lbtim.ib = 2
- pp.t1 = scalar_coord(cm, 'time').units.num2date(scalar_coord(cm, 'time').bounds[0,0])
- pp.t2 = scalar_coord(cm, 'time').units.num2date(scalar_coord(cm, 'time').bounds[0,1])
- pp.lbft = scalar_coord(cm, 'forecast_period').units.convert(scalar_coord(cm, 'forecast_period').bounds[0, 1], 'hours')
-
-IF
- # Handle missing forecast period using time and forecast reference time.
- scalar_coord(cm, 'time') is not None
- scalar_coord(cm, 'time').has_bounds()
- scalar_coord(cm, 'clim_season') is None
- scalar_coord(cm, 'forecast_period') is None
- scalar_coord(cm, 'forecast_reference_time') is not None
-THEN
- pp.lbtim.ib = 2
- pp.t1 = scalar_coord(cm, 'time').units.num2date(scalar_coord(cm, 'time').bounds[0,0])
- pp.t2 = scalar_coord(cm, 'time').units.num2date(scalar_coord(cm, 'time').bounds[0,1])
- pp.lbft = scalar_coord(cm, 'time').units.convert(scalar_coord(cm, 'time').bounds[0, 1], 'hours since epoch') - scalar_coord(cm, 'forecast_reference_time').units.convert(scalar_coord(cm, 'forecast_reference_time').points[0], 'hours since epoch')
-
-IF
- # XXX Note the repetition of the previous rule's constraints
- # This can be addressed through REQUIRES/PROVIDES extensions
- scalar_coord(cm, 'time') is not None
- scalar_coord(cm, 'time').has_bounds()
- scalar_coord(cm, 'clim_season') is None
- scalar_coord(cm, 'forecast_period') is not None or scalar_coord(cm, 'forecast_reference_time') is not None
- scalar_cell_method(cm, 'mean', 'time') is not None
- scalar_cell_method(cm, 'mean', 'time').intervals != ()
- scalar_cell_method(cm, 'mean', 'time').intervals[0].endswith('hour')
-THEN
- pp.lbtim.ia = int(scalar_cell_method(cm, 'mean', 'time').intervals[0][:-5])
-
-IF
- # XXX Note the repetition of the previous rule's constraints
- scalar_coord(cm, 'time') is not None
- scalar_coord(cm, 'time').has_bounds()
- scalar_coord(cm, 'clim_season') is None
- scalar_coord(cm, 'forecast_period') is not None or scalar_coord(cm, 'forecast_reference_time') is not None
- scalar_cell_method(cm, 'mean', 'time') is None or scalar_cell_method(cm, 'mean', 'time').intervals == () or not scalar_cell_method(cm, 'mean', 'time').intervals[0].endswith('hour')
-THEN
- pp.lbtim.ia = 0
-
-IF
- # If the cell methods contain a minimum then overwrite lbtim.ia with this
- # interval
- scalar_coord(cm, 'time') is not None
- scalar_coord(cm, 'time').has_bounds()
- scalar_coord(cm, 'clim_season') is None
- scalar_coord(cm, 'forecast_period') is not None or scalar_coord(cm, 'forecast_reference_time') is not None
- scalar_cell_method(cm, 'minimum', 'time') is not None
- scalar_cell_method(cm, 'minimum', 'time').intervals != ()
- scalar_cell_method(cm, 'minimum', 'time').intervals[0].endswith('hour')
-THEN
- # set lbtim.ia with the integer part of the cell method's interval
- # e.g. if interval is '24 hour' then lbtim.ia becomes 24
- pp.lbtim.ia = int(scalar_cell_method(cm, 'minimum', 'time').intervals[0][:-5])
-
-IF
- # If the cell methods contain a maximum then overwrite lbtim.ia with this
- # interval
- scalar_coord(cm, 'time') is not None
- scalar_coord(cm, 'time').has_bounds()
- scalar_coord(cm, 'clim_season') is None
- scalar_coord(cm, 'forecast_period') is not None or scalar_coord(cm, 'forecast_reference_time') is not None
- scalar_cell_method(cm, 'maximum', 'time') is not None
- scalar_cell_method(cm, 'maximum', 'time').intervals != ()
- scalar_cell_method(cm, 'maximum', 'time').intervals[0].endswith('hour')
-THEN
- # set lbtim.ia with the integer part of the cell method's interval
- # e.g. if interval is '1 hour' then lbtim.ia becomes 1
- pp.lbtim.ia = int(scalar_cell_method(cm, 'maximum', 'time').intervals[0][:-5])
-
-#climatiological time mean - single year
-IF
- scalar_coord(cm, 'time') is not None
- scalar_coord(cm, 'time').has_bounds()
- scalar_coord(cm, 'time').units.num2date(scalar_coord(cm, 'time').bounds[0,0]).year == scalar_coord(cm, 'time').units.num2date(scalar_coord(cm, 'time').bounds[0,1]).year
- scalar_coord(cm, 'forecast_period') is not None
- scalar_coord(cm, 'forecast_period').has_bounds()
- scalar_coord(cm, 'clim_season') is not None
- 'clim_season' in cm.cell_methods[-1].coord_names
-THEN
- pp.lbtim.ia = 0
- pp.lbtim.ib = 2
- pp.t1 = scalar_coord(cm, 'time').units.num2date(scalar_coord(cm, 'time').bounds[0, 0])
- pp.t2 = scalar_coord(cm, 'time').units.num2date(scalar_coord(cm, 'time').bounds[0, 1])
- pp.lbft = scalar_coord(cm, 'forecast_period').units.convert(scalar_coord(cm, 'forecast_period').bounds[0, 1], 'hours')
-
-
-#climatiological time mean - spanning years - djf
-IF
- scalar_coord(cm, 'time') is not None
- scalar_coord(cm, 'time').has_bounds()
- scalar_coord(cm, 'time').units.num2date(scalar_coord(cm, 'time').bounds[0,0]).year != scalar_coord(cm, 'time').units.num2date(scalar_coord(cm, 'time').bounds[0,1]).year
- scalar_coord(cm, 'forecast_period') is not None
- scalar_coord(cm, 'forecast_period').has_bounds()
- scalar_coord(cm, 'clim_season') is not None
- 'clim_season' in cm.cell_methods[-1].coord_names
- scalar_coord(cm, 'clim_season').points[0] == 'djf'
-THEN
- pp.lbtim.ia = 0
- pp.lbtim.ib = 3
-
- pp.t1 = scalar_coord(cm, 'time').units.num2date(scalar_coord(cm, 'time').bounds[0,0])
- pp.t2 = scalar_coord(cm, 'time').units.num2date(scalar_coord(cm, 'time').bounds[0,1])
-
- pp.t1 = netcdftime.datetime( pp.t1.year if pp.t1.month==12 else pp.t1.year-1, 12, 1, 0, 0, 0 )
- pp.t2 = netcdftime.datetime( pp.t2.year, 3, 1, 0, 0, 0 )
-
- self.conditional_warning(scalar_coord(cm, 'time').bounds[0,0] != scalar_coord(cm, 'time').units.date2num(pp.t1), "modified t1 for climatological seasonal mean")
- self.conditional_warning(scalar_coord(cm, 'time').bounds[0,1] != scalar_coord(cm, 'time').units.date2num(pp.t2), "modified t2 for climatological seasonal mean")
-
- pp.lbft = scalar_coord(cm, 'forecast_period').units.convert(scalar_coord(cm, 'forecast_period').bounds[0, 1], 'hours')
-
-#climatiological time mean - spanning years - mam
-IF
- scalar_coord(cm, 'time') is not None
- scalar_coord(cm, 'time').has_bounds()
- scalar_coord(cm, 'time').units.num2date(scalar_coord(cm, 'time').bounds[0,0]).year != scalar_coord(cm, 'time').units.num2date(scalar_coord(cm, 'time').bounds[0,1]).year
- scalar_coord(cm, 'forecast_period') is not None
- scalar_coord(cm, 'forecast_period').has_bounds()
- scalar_coord(cm, 'clim_season') is not None
- 'clim_season' in cm.cell_methods[-1].coord_names
- scalar_coord(cm, 'clim_season').points[0] == 'mam'
-THEN
- pp.lbtim.ia = 0
- pp.lbtim.ib = 3
-
- pp.t1 = scalar_coord(cm, 'time').units.num2date(scalar_coord(cm, 'time').bounds[0,0])
- pp.t2 = scalar_coord(cm, 'time').units.num2date(scalar_coord(cm, 'time').bounds[0,1])
-
- pp.t1 = netcdftime.datetime( pp.t1.year, 3, 1, 0, 0, 0 )
- pp.t2 = netcdftime.datetime( pp.t2.year, 6, 1, 0, 0, 0 )
-
- self.conditional_warning(scalar_coord(cm, 'time').bounds[0,0] != scalar_coord(cm, 'time').units.date2num(pp.t1), "modified t1 for climatological seasonal mean")
- self.conditional_warning(scalar_coord(cm, 'time').bounds[0,1] != scalar_coord(cm, 'time').units.date2num(pp.t2), "modified t2 for climatological seasonal mean")
-
- pp.lbft = scalar_coord(cm, 'forecast_period').units.convert(scalar_coord(cm, 'forecast_period').bounds[0, 1], 'hours')
-
-#climatiological time mean - spanning years - jja
-IF
- scalar_coord(cm, 'time') is not None
- scalar_coord(cm, 'time').has_bounds()
- scalar_coord(cm, 'time').units.num2date(scalar_coord(cm, 'time').bounds[0,0]).year != scalar_coord(cm, 'time').units.num2date(scalar_coord(cm, 'time').bounds[0,1]).year
- scalar_coord(cm, 'forecast_period') is not None
- scalar_coord(cm, 'forecast_period').has_bounds()
- scalar_coord(cm, 'clim_season') is not None
- 'clim_season' in cm.cell_methods[-1].coord_names
- scalar_coord(cm, 'clim_season').points[0] == 'jja'
-THEN
- pp.lbtim.ia = 0
- pp.lbtim.ib = 3
-
- pp.t1 = scalar_coord(cm, 'time').units.num2date(scalar_coord(cm, 'time').bounds[0,0])
- pp.t2 = scalar_coord(cm, 'time').units.num2date(scalar_coord(cm, 'time').bounds[0,1])
-
- pp.t1 = netcdftime.datetime( pp.t1.year, 6, 1, 0, 0, 0 )
- pp.t2 = netcdftime.datetime( pp.t2.year, 9, 1, 0, 0, 0 )
-
- self.conditional_warning(scalar_coord(cm, 'time').bounds[0,0] != scalar_coord(cm, 'time').units.date2num(pp.t1), "modified t1 for climatological seasonal mean")
- self.conditional_warning(scalar_coord(cm, 'time').bounds[0,1] != scalar_coord(cm, 'time').units.date2num(pp.t2), "modified t2 for climatological seasonal mean")
-
- pp.lbft = scalar_coord(cm, 'forecast_period').units.convert(scalar_coord(cm, 'forecast_period').bounds[0, 1], 'hours')
-
-#climatiological time mean - spanning years - son
-IF
- scalar_coord(cm, 'time') is not None
- scalar_coord(cm, 'time').has_bounds()
- scalar_coord(cm, 'time').units.num2date(scalar_coord(cm, 'time').bounds[0,0]).year != scalar_coord(cm, 'time').units.num2date(scalar_coord(cm, 'time').bounds[0,1]).year
- scalar_coord(cm, 'forecast_period') is not None
- scalar_coord(cm, 'forecast_period').has_bounds()
- scalar_coord(cm, 'clim_season') is not None
- 'clim_season' in cm.cell_methods[-1].coord_names
- scalar_coord(cm, 'clim_season').points[0] == 'son'
-THEN
- pp.lbtim.ia = 0
- pp.lbtim.ib = 3
-
- pp.t1 = scalar_coord(cm, 'time').units.num2date(scalar_coord(cm, 'time').bounds[0,0])
- pp.t2 = scalar_coord(cm, 'time').units.num2date(scalar_coord(cm, 'time').bounds[0,1])
-
- pp.t1 = netcdftime.datetime( pp.t1.year, 9, 1, 0, 0, 0 )
- pp.t2 = netcdftime.datetime( pp.t2.year, 12, 1, 0, 0, 0 )
-
- self.conditional_warning(scalar_coord(cm, 'time').bounds[0,0] != scalar_coord(cm, 'time').units.date2num(pp.t1), "modified t1 for climatological seasonal mean")
- self.conditional_warning(scalar_coord(cm, 'time').bounds[0,1] != scalar_coord(cm, 'time').units.date2num(pp.t2), "modified t2 for climatological seasonal mean")
-
- pp.lbft = scalar_coord(cm, 'forecast_period').units.convert(scalar_coord(cm, 'forecast_period').bounds[0, 1], 'hours')
-
-#360 day calendar
-IF
- scalar_coord(cm, 'time') is not None
- scalar_coord(cm, 'time').units.calendar == '360_day'
-THEN
- pp.lbtim.ic = 2
-
-
-#gregorian calendar
-IF
- scalar_coord(cm, 'time') is not None
- scalar_coord(cm, 'time').units.calendar == 'gregorian'
-THEN
- pp.lbtim.ic = 1
-
-
-#365 day calendar
-IF
- scalar_coord(cm, 'time') is not None
- scalar_coord(cm, 'time').units.calendar == '365_day'
-THEN
- pp.lbtim.ic = 4
-
-#####################
-### grid and pole ###
-#####################
-
-IF
- vector_coord(cm, 'longitude') and not is_regular(vector_coord(cm, 'longitude'))
-THEN
- pp.bzx = 0
- pp.bdx = 0
- pp.lbnpt = vector_coord(cm, 'longitude').shape[0]
- pp.x = vector_coord(cm, 'longitude').points
-
-IF
- vector_coord(cm, 'grid_longitude') and not is_regular(vector_coord(cm, 'grid_longitude'))
-THEN
- pp.bzx = 0
- pp.bdx = 0
- pp.lbnpt = vector_coord(cm, 'grid_longitude').shape[0]
- pp.x = vector_coord(cm, 'grid_longitude').points
-
-IF
- vector_coord(cm, 'latitude') and not is_regular(vector_coord(cm, 'latitude'))
-THEN
- pp.bzy = 0
- pp.bdy = 0
- pp.lbrow = vector_coord(cm, 'latitude').shape[0]
- pp.y = vector_coord(cm, 'latitude').points
-
-IF
- vector_coord(cm, 'grid_latitude') and not is_regular(vector_coord(cm, 'grid_latitude'))
-THEN
- pp.bzy = 0
- pp.bdy = 0
- pp.lbrow = vector_coord(cm, 'grid_latitude').shape[0]
- pp.y = vector_coord(cm, 'grid_latitude').points
-
-IF
- vector_coord(cm, 'longitude') and is_regular(vector_coord(cm, 'longitude'))
-THEN
- pp.bzx = vector_coord(cm, 'longitude').points[0] - regular_step(vector_coord(cm, 'longitude'))
- pp.bdx = regular_step(vector_coord(cm, 'longitude'))
- pp.lbnpt = len(vector_coord(cm, 'longitude').points)
-
-IF
- vector_coord(cm, 'grid_longitude') and is_regular(vector_coord(cm, 'grid_longitude'))
-THEN
- pp.bzx = vector_coord(cm, 'grid_longitude').points[0] - regular_step(vector_coord(cm, 'grid_longitude'))
- pp.bdx = regular_step(vector_coord(cm, 'grid_longitude'))
- pp.lbnpt = len(vector_coord(cm, 'grid_longitude').points)
-
-IF
- vector_coord(cm, 'latitude') and is_regular(vector_coord(cm, 'latitude'))
-THEN
- pp.bzy = vector_coord(cm, 'latitude').points[0] - regular_step(vector_coord(cm, 'latitude'))
- pp.bdy = regular_step(vector_coord(cm, 'latitude'))
- pp.lbrow = len(vector_coord(cm, 'latitude').points)
-
-IF
- vector_coord(cm, 'grid_latitude') and is_regular(vector_coord(cm, 'grid_latitude'))
-THEN
- pp.bzy = vector_coord(cm, 'grid_latitude').points[0] - regular_step(vector_coord(cm, 'grid_latitude'))
- pp.bdy = regular_step(vector_coord(cm, 'grid_latitude'))
- pp.lbrow = len(vector_coord(cm, 'grid_latitude').points)
-
-
-#rotated?
-IF
-# iris.fileformats.pp.is_cross_section(cm) == False
- cm.coord_system("RotatedGeogCS") is not None
-THEN
- pp.lbcode = int(pp.lbcode) + 100
-
-
-#lon global
-IF
- vector_coord(cm, 'longitude') is not None
- vector_coord(cm, 'longitude').circular
-THEN
- pp.lbhem = 0
-
-IF
- vector_coord(cm, 'grid_longitude') is not None
- vector_coord(cm, 'grid_longitude').circular
-THEN
- pp.lbhem = 0
-
-#lon not global
-IF
- vector_coord(cm, 'longitude') is not None
- not vector_coord(cm, 'longitude').circular
-THEN
- pp.lbhem = 3
-
-IF
- vector_coord(cm, 'grid_longitude') is not None
- not vector_coord(cm, 'grid_longitude').circular
-THEN
- pp.lbhem = 3
-
-
-
-#####################################################
-############ non-standard cross-sections ############
-#####################################################
-
-# Ticket #1037, x=latitude, y=air_pressure - non-standard cross-section with bounds
-IF
- vector_coord(cm, 'air_pressure') is not None
- not vector_coord(cm, 'air_pressure').circular
- vector_coord(cm, 'air_pressure').has_bounds()
- vector_coord(cm, 'latitude') is not None
- not vector_coord(cm, 'latitude').circular
- vector_coord(cm, 'latitude').has_bounds()
-THEN
- pp.lbcode = 10000 + int(100*10) + 1
- pp.bgor = 0
- pp.y = vector_coord(cm, 'air_pressure').points
- pp.y_lower_bound = vector_coord(cm, 'air_pressure').bounds[:,0]
- pp.y_upper_bound = vector_coord(cm, 'air_pressure').bounds[:,1]
- pp.x = vector_coord(cm, 'latitude').points
- pp.x_lower_bound = vector_coord(cm, 'latitude').bounds[:,0]
- pp.x_upper_bound = vector_coord(cm, 'latitude').bounds[:,1]
- pp.lbrow = vector_coord(cm, 'air_pressure').shape[0]
- pp.lbnpt = vector_coord(cm, 'latitude').shape[0]
- pp.bzx = pp.bzy = pp.bdx = pp.bdy = 0
-
-# Ticket #1037, x=latitude, y=depth - non-standard cross-section with bounds
-IF
- vector_coord(cm, 'depth') is not None
- not vector_coord(cm, 'depth').circular
- vector_coord(cm, 'depth').has_bounds()
- vector_coord(cm, 'latitude') is not None
- not vector_coord(cm, 'latitude').circular
- vector_coord(cm, 'latitude').has_bounds()
-THEN
- pp.lbcode = 10000 + int(100*10) + 4
- pp.bgor = 0
- pp.y = vector_coord(cm, 'depth').points
- pp.y_lower_bound = vector_coord(cm, 'depth').bounds[:,0]
- pp.y_upper_bound = vector_coord(cm, 'depth').bounds[:,1]
- pp.x = vector_coord(cm, 'latitude').points
- pp.x_lower_bound = vector_coord(cm, 'latitude').bounds[:,0]
- pp.x_upper_bound = vector_coord(cm, 'latitude').bounds[:,1]
- pp.lbrow = vector_coord(cm, 'depth').shape[0]
- pp.lbnpt = vector_coord(cm, 'latitude').shape[0]
- pp.bzx = pp.bzy = pp.bdx = pp.bdy = 0
-
-# Ticket #1037, x=latitude, y=ETA - non-standard cross-section with bounds
-IF
- vector_coord(cm, 'eta') is not None
- not vector_coord(cm, 'eta').circular
- vector_coord(cm, 'eta').has_bounds()
- vector_coord(cm, 'latitude') is not None
- not vector_coord(cm, 'latitude').circular
- vector_coord(cm, 'latitude').has_bounds()
-THEN
- pp.lbcode = 10000 + int(100*10) + 3
- pp.bgor = 0
- pp.y = vector_coord(cm, 'eta').points
- pp.y_lower_bound = vector_coord(cm, 'eta').bounds[:,0]
- pp.y_upper_bound = vector_coord(cm, 'eta').bounds[:,1]
- pp.x = vector_coord(cm, 'latitude').points
- pp.x_lower_bound = vector_coord(cm, 'latitude').bounds[:,0]
- pp.x_upper_bound = vector_coord(cm, 'latitude').bounds[:,1]
- pp.lbrow = vector_coord(cm, 'eta').shape[0]
- pp.lbnpt = vector_coord(cm, 'latitude').shape[0]
- pp.bzx = pp.bzy = pp.bdx = pp.bdy = 0
-
-# Ticket #1037, x=days (360 calendar), y=depth - non-standard cross-section with bounds
-IF
- vector_coord(cm, 'depth') is not None
- not vector_coord(cm, 'depth').circular
- vector_coord(cm, 'depth').has_bounds()
- vector_coord(cm, 'time') is not None
- not vector_coord(cm, 'time').circular
- vector_coord(cm, 'time').has_bounds()
-THEN
- pp.lbcode = 10000 + int(100*23) + 4
- pp.bgor = 0
- pp.y = vector_coord(cm, 'depth').points
- pp.y_lower_bound = vector_coord(cm, 'depth').bounds[:,0]
- pp.y_upper_bound = vector_coord(cm, 'depth').bounds[:,1]
- pp.x = vector_coord(cm, 'time').points
- pp.x_lower_bound = vector_coord(cm, 'time').bounds[:,0]
- pp.x_upper_bound = vector_coord(cm, 'time').bounds[:,1]
- pp.lbrow = vector_coord(cm, 'depth').shape[0]
- pp.lbnpt = vector_coord(cm, 'time').shape[0]
- pp.bzx = pp.bzy = pp.bdx = pp.bdy = 0
-
-
-# Ticket #1037, x=days (360 calendar), y=air_pressure - non-standard cross-section with bounds
-IF
- vector_coord(cm, 'air_pressure') is not None
- not vector_coord(cm, 'air_pressure').circular
- vector_coord(cm, 'air_pressure').has_bounds()
- vector_coord(cm, 'time') is not None
- not vector_coord(cm, 'time').circular
- vector_coord(cm, 'time').has_bounds()
-THEN
- pp.lbcode = 10000 + int(100*23) + 1
- pp.bgor = 0
- pp.y = vector_coord(cm, 'air_pressure').points
- pp.y_lower_bound = vector_coord(cm, 'air_pressure').bounds[:,0]
- pp.y_upper_bound = vector_coord(cm, 'air_pressure').bounds[:,1]
- pp.x = vector_coord(cm, 'time').points
- pp.x_lower_bound = vector_coord(cm, 'time').bounds[:,0]
- pp.x_upper_bound = vector_coord(cm, 'time').bounds[:,1]
- pp.lbrow = vector_coord(cm, 'air_pressure').shape[0]
- pp.lbnpt = vector_coord(cm, 'time').shape[0]
- pp.bzx = pp.bzy = pp.bdx = pp.bdy = 0
-
-
-
-
-
-#####################################################
-### lbproc (must start at 0 before rules are run) ###
-#####################################################
-
-IF
- cm.attributes.get("ukmo__process_flags", None)
-THEN
- pp.lbproc += sum([iris.fileformats.pp.lbproc_map[name] for name in cm.attributes["ukmo__process_flags"]])
-
-#zonal-mean
-IF
- # Look for a CellMethod which is a "mean" over "longitude".
- scalar_cell_method(cm, 'mean', 'longitude') is not None
-THEN
- pp.lbproc += 64
-
-IF
- # Look for a CellMethod which is a "mean" over "grid longitude".
- scalar_cell_method(cm, 'mean', 'grid_longitude') is not None
-THEN
- pp.lbproc += 64
-
-#time-mean
-IF
- # Look for a CellMethod which is a "mean" over "time".
- scalar_cell_method(cm, 'mean', 'time') is not None
-THEN
- pp.lbproc += 128
-
-#time-minimum
-IF
- # Look for a CellMethod which is a "minimum" over "time".
- scalar_cell_method(cm, 'minimum', 'time') is not None
-THEN
- pp.lbproc += 4096
-
-#time-maximum
-IF
- # Look for a CellMethod which is a "maximum" over "time".
- scalar_cell_method(cm, 'maximum', 'time') is not None
-THEN
- pp.lbproc += 8192
-
-##########################
-### vertical - lbuser5 ###
-##########################
-
-IF
- scalar_coord(cm, 'pseudo_level') is not None
- not scalar_coord(cm, 'pseudo_level').bounds
-THEN
- pp.lbuser[4] = scalar_coord(cm, 'pseudo_level').points[0]
-
-
-################################
-### vertical - lbvc and blev ###
-################################
-
-#single height level
-IF
- scalar_coord(cm, 'height') is not None
- not scalar_coord(cm, 'height').bounds
- scalar_coord(cm, 'height').points[0] == 1.5
- cm.name() == 'air_temperature'
-THEN
- pp.lbvc = 129
- pp.blev = -1
-
-IF
- pp.lbvc == 0
- scalar_coord(cm, 'height') is not None
- not scalar_coord(cm, 'height').bounds
-THEN
- pp.lbvc = 1
- pp.blev = cm.coord('height').points[0]
-
-
-#single air_pressure level
-IF
- scalar_coord(cm, 'air_pressure') is not None
- not scalar_coord(cm, 'air_pressure').bounds
-THEN
- pp.lbvc = 8
- pp.blev = scalar_coord(cm, 'air_pressure').points[0]
-
-#single "pressure" level
-#TODO: "pressure" is in the PP load rules awaiting more info
-IF
- scalar_coord(cm, 'pressure') is not None
- not scalar_coord(cm, 'pressure').bounds
-THEN
- pp.lbvc = 8
- pp.blev = scalar_coord(cm, 'pressure').points[0]
-
-
-# single depth level (non cross section)
-IF
- scalar_coord(cm, 'model_level_number') is not None
- not scalar_coord(cm, 'model_level_number').bounds
- scalar_coord(cm, 'depth') is not None
- not scalar_coord(cm, 'depth').bounds
-THEN
- pp.lbvc = 2
- pp.lblev = scalar_coord(cm, 'model_level_number').points[0]
- pp.blev = scalar_coord(cm, 'depth').points[0]
-
-# single depth level (Non-dimensional soil model level)
-IF
- scalar_coord(cm, 'soil_model_level_number') is not None
- not scalar_coord(cm, 'soil_model_level_number').has_bounds()
- # The following `is None` checks ensure this rule does not get run
- # if any of the previous LBVC setting rules have run. It gives these
- # rules something of an IF-THEN-ELSE structure.
- scalar_coord(cm, 'air_pressure') is None
- scalar_coord(cm, 'depth') is None
- scalar_coord(cm, 'height') is None
- scalar_coord(cm, 'pressure') is None
- cm.standard_name is not None
- 'soil' in cm.standard_name
-THEN
- pp.lbvc = 6
- pp.lblev = scalar_coord(cm, 'soil_model_level_number').points[0]
- pp.blev = pp.lblev
- pp.brsvd[0] = 0
- pp.brlev = 0
-
-# single depth level (soil depth)
-IF
- scalar_coord(cm, 'depth') is not None
- scalar_coord(cm, 'depth').has_bounds()
- # The following `is None` checks ensure this rule does not get run
- # if any of the previous LBVC setting rules have run. It gives these
- # rules something of an IF-THEN-ELSE structure.
- scalar_coord(cm, 'air_pressure') is None
- scalar_coord(cm, 'soil_model_level_number') is None
- scalar_coord(cm, 'model_level_number') is None
- scalar_coord(cm, 'height') is None
- scalar_coord(cm, 'pressure') is None
- cm.standard_name is not None
- 'soil' in cm.standard_name
-THEN
- pp.lbvc = 6
- pp.blev = scalar_coord(cm, 'depth').points[0]
- pp.brsvd[0] = scalar_coord(cm, 'depth').bounds[0, 0]
- pp.brlev = scalar_coord(cm, 'depth').bounds[0, 1]
-
-# single potential-temperature level
-IF
- scalar_coord(cm, 'air_potential_temperature') is not None
- not scalar_coord(cm, 'air_potential_temperature').bounds
- # The following `is None` checks ensure this rule does not get run
- # if any of the previous LBVC setting rules have run. It gives these
- # rules something of an IF-THEN-ELSE structure.
- scalar_coord(cm, 'air_pressure') is None
- scalar_coord(cm, 'depth') is None
- scalar_coord(cm, 'height') is None
- scalar_coord(cm, 'pressure') is None
- scalar_coord(cm, 'model_level_number') is None
-THEN
- pp.lbvc = 19
- pp.lblev = scalar_coord(cm, 'air_potential_temperature').points[0]
- pp.blev = scalar_coord(cm, 'air_potential_temperature').points[0]
-
-# single hybrid_height level (without aux factory e.g. due to missing orography)
-IF
- not has_aux_factory(cm, iris.aux_factory.HybridHeightFactory)
- scalar_coord(cm, 'model_level_number') is not None
- scalar_coord(cm, 'model_level_number').bounds is None
- scalar_coord(cm, 'level_height') is not None
- scalar_coord(cm, 'level_height').bounds is not None
- scalar_coord(cm, 'sigma') is not None
- scalar_coord(cm, 'sigma').bounds is not None
-THEN
- pp.lbvc = 65
- pp.lblev = scalar_coord(cm, 'model_level_number').points[0]
- pp.blev = scalar_coord(cm, 'level_height').points[0]
- pp.brlev = scalar_coord(cm, 'level_height').bounds[0, 0]
- pp.brsvd[0] = scalar_coord(cm, 'level_height').bounds[0, 1]
- pp.bhlev = scalar_coord(cm, 'sigma').points[0]
- pp.bhrlev = scalar_coord(cm, 'sigma').bounds[0, 0]
- pp.brsvd[1] = scalar_coord(cm, 'sigma').bounds[0, 1]
-
-# single hybrid_height level (with aux factory)
-IF
- has_aux_factory(cm, iris.aux_factory.HybridHeightFactory)
- scalar_coord(cm, 'model_level_number') is not None
- scalar_coord(cm, 'model_level_number').bounds is None
- aux_factory(cm, iris.aux_factory.HybridHeightFactory).dependencies['delta'] is not None
- aux_factory(cm, iris.aux_factory.HybridHeightFactory).dependencies['delta'].bounds is not None
- aux_factory(cm, iris.aux_factory.HybridHeightFactory).dependencies['sigma'] is not None
- aux_factory(cm, iris.aux_factory.HybridHeightFactory).dependencies['sigma'].bounds is not None
-THEN
- pp.lbvc = 65
- pp.lblev = scalar_coord(cm, 'model_level_number').points[0]
- pp.blev = aux_factory(cm, iris.aux_factory.HybridHeightFactory).dependencies['delta'].points[0]
- pp.brlev = aux_factory(cm, iris.aux_factory.HybridHeightFactory).dependencies['delta'].bounds[0, 0]
- pp.brsvd[0] = aux_factory(cm, iris.aux_factory.HybridHeightFactory).dependencies['delta'].bounds[0, 1]
- pp.bhlev = aux_factory(cm, iris.aux_factory.HybridHeightFactory).dependencies['sigma'].points[0]
- pp.bhrlev = aux_factory(cm, iris.aux_factory.HybridHeightFactory).dependencies['sigma'].bounds[0, 0]
- pp.brsvd[1] = aux_factory(cm, iris.aux_factory.HybridHeightFactory).dependencies['sigma'].bounds[0, 1]
-
-# single hybrid pressure level
-IF
- has_aux_factory(cm, iris.aux_factory.HybridPressureFactory)
- scalar_coord(cm, 'model_level_number') is not None
- scalar_coord(cm, 'model_level_number').bounds is None
- aux_factory(cm, iris.aux_factory.HybridPressureFactory).dependencies['delta'] is not None
- aux_factory(cm, iris.aux_factory.HybridPressureFactory).dependencies['delta'].bounds is not None
- aux_factory(cm, iris.aux_factory.HybridPressureFactory).dependencies['sigma'] is not None
- aux_factory(cm, iris.aux_factory.HybridPressureFactory).dependencies['sigma'].bounds is not None
-THEN
- pp.lbvc = 9
- pp.lblev = scalar_coord(cm, 'model_level_number').points[0]
-
- # Note that sigma and delta are swapped around from the hybrid height rules above.
- pp.blev = aux_factory(cm, iris.aux_factory.HybridPressureFactory).dependencies['sigma'].points[0]
- pp.brlev = aux_factory(cm, iris.aux_factory.HybridPressureFactory).dependencies['sigma'].bounds[0, 0]
- pp.brsvd[0] = aux_factory(cm, iris.aux_factory.HybridPressureFactory).dependencies['sigma'].bounds[0, 1]
-
- pp.bhlev = aux_factory(cm, iris.aux_factory.HybridPressureFactory).dependencies['delta'].points[0]
- pp.bhrlev = aux_factory(cm, iris.aux_factory.HybridPressureFactory).dependencies['delta'].bounds[0, 0]
- pp.brsvd[1] = aux_factory(cm, iris.aux_factory.HybridPressureFactory).dependencies['delta'].bounds[0, 1]
-
-
-#MDI
-IF
- cm.fill_value is not None
-THEN
- pp.bmdi = cm.fill_value
-
-IF
- cm.fill_value is None
-THEN
- pp.bmdi = -1e30
-
-
-# CFname mega rule
-IF
- (cm.standard_name, cm.long_name, str(cm.units)) in iris.fileformats.um_cf_map.CF_TO_LBFC
-THEN
- pp.lbfc = iris.fileformats.um_cf_map.CF_TO_LBFC[(cm.standard_name, cm.long_name, str(cm.units))]
-
-IF
- 'STASH' in cm.attributes
- str(cm.attributes['STASH']) in iris.fileformats._ff_cross_references.STASH_TRANS
-THEN
- pp.lbfc = iris.fileformats._ff_cross_references.STASH_TRANS[str(cm.attributes['STASH'])].field_code
diff --git a/lib/iris/fileformats/_pp_lbproc_pairs.py b/lib/iris/fileformats/_pp_lbproc_pairs.py
new file mode 100644
index 0000000000..28e35a8e86
--- /dev/null
+++ b/lib/iris/fileformats/_pp_lbproc_pairs.py
@@ -0,0 +1,48 @@
+# (C) British Crown Copyright 2017, Met Office
+#
+# This file is part of Iris.
+#
+# Iris is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Iris is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with Iris. If not, see .
+
+from __future__ import (absolute_import, division, print_function)
+from six.moves import (filter, input, map, range, zip) # noqa
+import six
+
+import itertools
+
+
+# LBPROC codes and their English equivalents
+LBPROC_PAIRS = ((1, "Difference from another experiment"),
+ (2, "Difference from zonal (or other spatial) mean"),
+ (4, "Difference from time mean"),
+ (8, "X-derivative (d/dx)"),
+ (16, "Y-derivative (d/dy)"),
+ (32, "Time derivative (d/dt)"),
+ (64, "Zonal mean field"),
+ (128, "Time mean field"),
+ (256, "Product of two fields"),
+ (512, "Square root of a field"),
+ (1024, "Difference between fields at levels BLEV and BRLEV"),
+ (2048, "Mean over layer between levels BLEV and BRLEV"),
+ (4096, "Minimum value of field during time period"),
+ (8192, "Maximum value of field during time period"),
+ (16384, "Magnitude of a vector, not specifically wind speed"),
+ (32768, "Log10 of a field"),
+ (65536, "Variance of a field"),
+ (131072, "Mean over an ensemble of parallel runs"))
+
+# lbproc_map is dict mapping lbproc->English and English->lbproc
+# essentially a one to one mapping
+LBPROC_MAP = {x: y for x, y in
+ itertools.chain(LBPROC_PAIRS, ((y, x) for x, y in LBPROC_PAIRS))}
diff --git a/lib/iris/fileformats/pp.py b/lib/iris/fileformats/pp.py
index e916c3e76d..a75bb0bba9 100644
--- a/lib/iris/fileformats/pp.py
+++ b/lib/iris/fileformats/pp.py
@@ -26,7 +26,6 @@
import abc
import collections
from copy import deepcopy
-import itertools
import operator
import os
import re
@@ -41,8 +40,14 @@
from iris._deprecation import warn_deprecated
from iris._lazy_data import as_concrete_data, as_lazy_data, is_lazy_data
import iris.config
-import iris.fileformats.rules
import iris.fileformats.pp_rules
+from iris.fileformats.pp_save_rules import verify
+
+# NOTE: this is for backwards-compatibility *ONLY*
+# We could simply remove it for v2.0 ?
+from iris.fileformats._pp_lbproc_pairs import (LBPROC_PAIRS,
+ LBPROC_MAP as lbproc_map)
+import iris.fileformats.rules
import iris.coord_systems
@@ -60,10 +65,6 @@
EARTH_RADIUS = 6371229.0
-# Cube->PP rules are loaded on first use
-_save_rules = None
-
-
PP_HEADER_DEPTH = 256
PP_WORD_DEPTH = 4
NUM_LONG_HEADERS = 45
@@ -221,31 +222,6 @@
'default': np.dtype('>f4'),
}
-# LBPROC codes and their English equivalents
-LBPROC_PAIRS = ((1, "Difference from another experiment"),
- (2, "Difference from zonal (or other spatial) mean"),
- (4, "Difference from time mean"),
- (8, "X-derivative (d/dx)"),
- (16, "Y-derivative (d/dy)"),
- (32, "Time derivative (d/dt)"),
- (64, "Zonal mean field"),
- (128, "Time mean field"),
- (256, "Product of two fields"),
- (512, "Square root of a field"),
- (1024, "Difference between fields at levels BLEV and BRLEV"),
- (2048, "Mean over layer between levels BLEV and BRLEV"),
- (4096, "Minimum value of field during time period"),
- (8192, "Maximum value of field during time period"),
- (16384, "Magnitude of a vector, not specifically wind speed"),
- (32768, "Log10 of a field"),
- (65536, "Variance of a field"),
- (131072, "Mean over an ensemble of parallel runs"))
-
-# lbproc_map is dict mapping lbproc->English and English->lbproc
-# essentially a one to one mapping
-lbproc_map = {x: y for x, y in
- itertools.chain(LBPROC_PAIRS, ((y, x) for x, y in LBPROC_PAIRS))}
-
class STASH(collections.namedtuple('STASH', 'model section item')):
"""
@@ -1786,21 +1762,6 @@ def _field_gen(filename, read_data_bytes, little_ended=False):
yield pp_field
-def _ensure_save_rules_loaded():
- """Makes sure the standard save rules are loaded."""
-
- # Uses these module-level variables
- global _save_rules
-
- if _save_rules is None:
- # Load the pp save rules
- rules_filename = os.path.join(iris.config.CONFIG_PATH,
- 'pp_save_rules.txt')
- with iris.fileformats.rules._disable_deprecation_warnings():
- _save_rules = iris.fileformats.rules.RulesContainer(
- rules_filename, iris.fileformats.rules.ProcedureRule)
-
-
# Stash codes not to be filtered (reference altitude and pressure fields).
_STASH_ALLOW = [STASH(1, 0, 33), STASH(1, 0, 1)]
@@ -2102,8 +2063,6 @@ def save_pairs_from_cube(cube, field_coords=None, target=None):
# On the flip side, record which Cube metadata has been "used" and flag up
# unused?
- _ensure_save_rules_loaded()
-
n_dims = len(cube.shape)
if n_dims < 2:
raise ValueError('Unable to save a cube of fewer than 2 dimensions.')
@@ -2150,8 +2109,7 @@ def save_pairs_from_cube(cube, field_coords=None, target=None):
# Run the PP save rules on the slice2D, to fill the PPField,
# recording the rules that were used
- rules_result = _save_rules.verify(slice2D, pp_field)
- verify_rules_ran = rules_result.matching_rules
+ pp_field = verify(slice2D, pp_field)
yield (slice2D, pp_field)
diff --git a/lib/iris/fileformats/pp_rules.py b/lib/iris/fileformats/pp_rules.py
index a86c5f358e..f709ac30c9 100644
--- a/lib/iris/fileformats/pp_rules.py
+++ b/lib/iris/fileformats/pp_rules.py
@@ -30,9 +30,10 @@
from iris.coords import AuxCoord, CellMethod, DimCoord
from iris.fileformats.rules import (ConversionMetadata, Factory, Reference,
ReferenceTarget)
+import iris.fileformats.pp
+from iris.fileformats._pp_lbproc_pairs import LBPROC_MAP
from iris.fileformats.um_cf_map import (LBFC_TO_CF, STASH_TO_CF,
STASHCODE_IMPLIED_HEIGHTS)
-import iris.fileformats.pp
###############################################################################
@@ -1027,7 +1028,7 @@ def _all_other_rules(f):
if unhandled_lbproc:
attributes["ukmo__process_flags"] = tuple(sorted(
[name
- for value, name in six.iteritems(iris.fileformats.pp.lbproc_map)
+ for value, name in six.iteritems(LBPROC_MAP)
if isinstance(value, int) and f.lbproc & value]))
if (f.lbsrce % 10000) == 1111:
diff --git a/lib/iris/fileformats/pp_save_rules.py b/lib/iris/fileformats/pp_save_rules.py
new file mode 100644
index 0000000000..2d11b757d8
--- /dev/null
+++ b/lib/iris/fileformats/pp_save_rules.py
@@ -0,0 +1,828 @@
+# (C) British Crown Copyright 2017, Met Office
+#
+# This file is part of Iris.
+#
+# Iris is free software: you can redistribute it and/or modify it under
+# the terms of the GNU Lesser General Public License as published by the
+# Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Iris is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with Iris. If not, see .
+
+from __future__ import (absolute_import, division, print_function)
+from six.moves import (filter, input, map, range, zip) # noqa
+import six
+
+import warnings
+
+import iris
+from iris.fileformats._ff_cross_references import STASH_TRANS
+from iris.aux_factory import HybridHeightFactory, HybridPressureFactory
+from iris.fileformats.um_cf_map import CF_TO_LBFC
+from iris.fileformats._pp_lbproc_pairs import LBPROC_MAP
+from iris.fileformats.rules import (aux_factory,
+ has_aux_factory,
+ scalar_cell_method,
+ scalar_coord,
+ vector_coord)
+from iris.util import is_regular, regular_step
+import netcdftime
+
+
+def _basic_coord_system_rules(cube, pp):
+ """
+ Rules for setting the coord system of the PP field.
+
+ Args:
+ cube: the cube being saved as a series of PP fields.
+ pp: the current PP field having save rules applied.
+
+ Returns:
+ The PP field with updated metadata.
+
+ """
+ if (cube.coord_system("GeogCS") is not None or
+ cube.coord_system(None) is None):
+ pp.bplat = 90
+ pp.bplon = 0
+ elif cube.coord_system("RotatedGeogCS") is not None:
+ pp.bplat = cube.coord_system("RotatedGeogCS").grid_north_pole_latitude
+ pp.bplon = cube.coord_system("RotatedGeogCS").grid_north_pole_longitude
+ return pp
+
+
+def _um_version_rules(cube, pp):
+ from_um_str = "Data from Met Office Unified Model"
+ source_attr = cube.attributes.get('source')
+ if source_attr is not None:
+ um_version = source_attr.rsplit(from_um_str, 1)
+
+ if ('um_version' not in cube.attributes and
+ 'source' in cube.attributes and
+ len(um_version) > 1 and
+ len(um_version[1]) == 0):
+ # UM - no version number.
+ pp.lbsrce = 1111
+ elif ('um_version' not in cube.attributes and
+ 'source' in cube.attributes and
+ len(um_version) > 1 and
+ len(um_version[1]) > 0):
+ # UM - with version number.
+ pp.lbsrce = int(float(um_version[1]) * 1000000) + 1111
+ elif 'um_version' in cube.attributes:
+ # UM - from 'um_version' attribute.
+ um_ver_minor = int(cube.attributes['um_version'].split('.')[1])
+ um_ver_major = int(cube.attributes['um_version'].split('.')[0])
+ pp.lbsrce = 1111 + 10000 * um_ver_minor + 1000000 * um_ver_major
+ return pp
+
+
+def _stash_rules(cube, pp):
+ """
+ Attributes rules for setting the STASH attribute of the PP field.
+
+ Args:
+ cube: the cube being saved as a series of PP fields.
+ pp: the current PP field having save rules applied.
+
+ Returns:
+ The PP field with updated metadata.
+
+ """
+ if 'STASH' in cube.attributes:
+ stash = cube.attributes['STASH']
+ if isinstance(stash, iris.fileformats.pp.STASH):
+ pp.lbuser[3] = 1000 * (stash.section or 0) + (stash.item or 0)
+ pp.lbuser[6] = (stash.model or 0)
+ return pp
+
+
+def _general_time_rules(cube, pp):
+ """
+ Rules for setting time metadata of the PP field.
+
+ Args:
+ cube: the cube being saved as a series of PP fields.
+ pp: the current PP field having save rules applied.
+
+ Returns:
+ The PP field with updated metadata.
+
+ """
+ time_coord = scalar_coord(cube, 'time')
+ fp_coord = scalar_coord(cube, 'forecast_period')
+ frt_coord = scalar_coord(cube, 'forecast_reference_time')
+ clim_season_coord = scalar_coord(cube, 'clim_season')
+
+ cm_time_mean = scalar_cell_method(cube, 'mean', 'time')
+ cm_time_min = scalar_cell_method(cube, 'minimum', 'time')
+ cm_time_max = scalar_cell_method(cube, 'maximum', 'time')
+
+ # No forecast.
+ if time_coord is not None and fp_coord is None and frt_coord is None:
+ pp.lbtim.ia = 0
+ pp.lbtim.ib = 0
+ pp.t1 = time_coord.units.num2date(time_coord.points[0])
+ pp.t2 = netcdftime.datetime(0, 0, 0)
+
+ # Forecast.
+ if (time_coord is not None and
+ not time_coord.has_bounds() and
+ fp_coord is not None):
+ pp.lbtim.ia = 0
+ pp.lbtim.ib = 1
+ pp.t1 = time_coord.units.num2date(time_coord.points[0])
+ pp.t2 = time_coord.units.num2date(time_coord.points[0] -
+ fp_coord.points[0])
+ pp.lbft = fp_coord.points[0]
+
+ # Time mean (non-climatological).
+ # XXX This only works when we have a single timestep.
+ if (time_coord is not None and
+ time_coord.has_bounds() and
+ clim_season_coord is None and
+ fp_coord is not None and
+ fp_coord.has_bounds()):
+ # XXX How do we know *which* time to use if there are more than
+ # one? *Can* there be more than one?
+ pp.lbtim.ib = 2
+ pp.t1 = time_coord.units.num2date(time_coord.bounds[0, 0])
+ pp.t2 = time_coord.units.num2date(time_coord.bounds[0, 1])
+ pp.lbft = fp_coord.units.convert(fp_coord.bounds[0, 1], 'hours')
+
+ if (time_coord is not None and
+ time_coord.has_bounds() and
+ clim_season_coord is None and
+ fp_coord is None and
+ frt_coord is not None):
+ # Handle missing forecast period using time and forecast ref time.
+ pp.lbtim.ib = 2
+ pp.t1 = time_coord.units.num2date(time_coord.bounds[0, 0])
+ pp.t2 = time_coord.units.num2date(time_coord.bounds[0, 1])
+ stop = time_coord.units.convert(time_coord.bounds[0, 1],
+ 'hours since epoch')
+ start = frt_coord.units.convert(frt_coord.points[0],
+ 'hours since epoch')
+ pp.lbft = stop - start
+
+ if (time_coord is not None and
+ time_coord.has_bounds() and
+ clim_season_coord is None and
+ (fp_coord is not None or frt_coord is not None) and
+ cm_time_mean is not None and
+ cm_time_mean.intervals != () and
+ cm_time_mean.intervals[0].endswith('hour')):
+ pp.lbtim.ia = int(cm_time_mean.intervals[0][:-5])
+
+ if (time_coord is not None and
+ time_coord.has_bounds() and
+ clim_season_coord is None and
+ (fp_coord is not None or frt_coord is not None) and
+ (cm_time_mean is None or cm_time_mean.intervals == () or
+ not cm_time_mean.intervals[0].endswith('hour'))):
+ pp.lbtim.ia = 0
+
+ # If the cell methods contain a minimum then overwrite lbtim.ia with this
+ # interval.
+ if (time_coord is not None and
+ time_coord.has_bounds() and
+ clim_season_coord is None and
+ (fp_coord is not None or frt_coord is not None) and
+ cm_time_min is not None and
+ cm_time_min.intervals != () and
+ cm_time_min.intervals[0].endswith('hour')):
+ # Set lbtim.ia with the integer part of the cell method's interval
+ # e.g. if interval is '24 hour' then lbtim.ia becomes 24.
+ pp.lbtim.ia = int(cm_time_min.intervals[0][:-5])
+
+ # If the cell methods contain a maximum then overwrite lbtim.ia with this
+ # interval.
+ if (time_coord is not None and
+ time_coord.has_bounds() and
+ clim_season_coord is None and
+ (fp_coord is not None or frt_coord is not None) and
+ cm_time_max is not None and
+ cm_time_max.intervals != () and
+ cm_time_max.intervals[0].endswith('hour')):
+ # Set lbtim.ia with the integer part of the cell method's interval
+ # e.g. if interval is '1 hour' then lbtim.ia becomes 1.
+ pp.lbtim.ia = int(cm_time_max.intervals[0][:-5])
+
+ if time_coord is not None and time_coord.has_bounds():
+ lower_bound_yr =\
+ time_coord.units.num2date(time_coord.bounds[0, 0]).year
+ upper_bound_yr =\
+ time_coord.units.num2date(time_coord.bounds[0, 1]).year
+ else:
+ lower_bound_yr = None
+ upper_bound_yr = None
+
+ # Climatological time means.
+ if (time_coord is not None and
+ time_coord.has_bounds() and
+ lower_bound_yr == upper_bound_yr and
+ fp_coord is not None and
+ fp_coord.has_bounds() and
+ clim_season_coord is not None and
+ 'clim_season' in cube.cell_methods[-1].coord_names):
+ # Climatological time mean - single year.
+ pp.lbtim.ia = 0
+ pp.lbtim.ib = 2
+ pp.t1 = time_coord.units.num2date(time_coord.bounds[0, 0])
+ pp.t2 = time_coord.units.num2date(time_coord.bounds[0, 1])
+ pp.lbft = fp_coord.units.convert(fp_coord.bounds[0, 1], 'hours')
+
+ elif (time_coord is not None and
+ time_coord.has_bounds() and
+ lower_bound_yr != upper_bound_yr and
+ fp_coord is not None and
+ fp_coord.has_bounds() and
+ clim_season_coord is not None and
+ 'clim_season' in cube.cell_methods[-1].coord_names and
+ clim_season_coord.points[0] == 'djf'):
+ # Climatological time mean - spanning years - djf.
+ pp.lbtim.ia = 0
+ pp.lbtim.ib = 3
+ pp.t1 = time_coord.units.num2date(time_coord.bounds[0, 0])
+ pp.t2 = time_coord.units.num2date(time_coord.bounds[0, 1])
+ if pp.t1.month == 12:
+ pp.t1 = netcdftime.datetime(pp.t1.year)
+ else:
+ pp.t1 = netcdftime.datetime(pp.t1.year-1, 12, 1, 0, 0, 0)
+ pp.t2 = netcdftime.datetime(pp.t2.year, 3, 1, 0, 0, 0)
+ _conditional_warning(
+ time_coord.bounds[0, 0] != time_coord.units.date2num(pp.t1),
+ "modified t1 for climatological seasonal mean")
+ _conditional_warning(
+ time_coord.bounds[0, 1] != time_coord.units.date2num(pp.t2),
+ "modified t2 for climatological seasonal mean")
+ pp.lbft = fp_coord.units.convert(fp_coord.bounds[0, 1], 'hours')
+
+ elif (time_coord is not None and
+ time_coord.has_bounds() and
+ lower_bound_yr != upper_bound_yr and
+ fp_coord is not None and
+ fp_coord.has_bounds() and
+ clim_season_coord is not None and
+ 'clim_season' in cube.cell_methods[-1].coord_names and
+ clim_season_coord.points[0] == 'mam'):
+ # Climatological time mean - spanning years - mam.
+ pp.lbtim.ia = 0
+ pp.lbtim.ib = 3
+ # TODO: explain why t1/t2 are overwritten with fixed season boundaries.
+ pp.t1 = time_coord.units.num2date(time_coord.bounds[0, 0])
+ pp.t2 = time_coord.units.num2date(time_coord.bounds[0, 1])
+ pp.t1 = netcdftime.datetime(pp.t1.year, 3, 1, 0, 0, 0)
+ pp.t2 = netcdftime.datetime(pp.t2.year, 6, 1, 0, 0, 0)
+ _conditional_warning(
+ time_coord.bounds[0, 0] != time_coord.units.date2num(pp.t1),
+ "modified t1 for climatological seasonal mean")
+ _conditional_warning(
+ time_coord.bounds[0, 1] != time_coord.units.date2num(pp.t2),
+ "modified t2 for climatological seasonal mean")
+ pp.lbft = fp_coord.units.convert(fp_coord.bounds[0, 1], 'hours')
+
+ elif (time_coord is not None and
+ time_coord.has_bounds() and
+ lower_bound_yr != upper_bound_yr and
+ fp_coord is not None and
+ fp_coord.has_bounds() and
+ clim_season_coord is not None and
+ 'clim_season' in cube.cell_methods[-1].coord_names and
+ clim_season_coord.points[0] == 'jja'):
+ # Climatological time mean - spanning years - jja.
+ pp.lbtim.ia = 0
+ pp.lbtim.ib = 3
+ # TODO: explain why t1/t2 are overwritten with fixed season boundaries.
+ pp.t1 = time_coord.units.num2date(time_coord.bounds[0, 0])
+ pp.t2 = time_coord.units.num2date(time_coord.bounds[0, 1])
+ pp.t1 = netcdftime.datetime(pp.t1.year, 6, 1, 0, 0, 0)
+ pp.t2 = netcdftime.datetime(pp.t2.year, 9, 1, 0, 0, 0)
+ _conditional_warning(
+ time_coord.bounds[0, 0] != time_coord.units.date2num(pp.t1),
+ "modified t1 for climatological seasonal mean")
+ _conditional_warning(
+ time_coord.bounds[0, 1] != time_coord.units.date2num(pp.t2),
+ "modified t2 for climatological seasonal mean")
+ pp.lbft = fp_coord.units.convert(fp_coord.bounds[0, 1], 'hours')
+
+ elif (time_coord is not None and
+ time_coord.has_bounds() and
+ lower_bound_yr != upper_bound_yr and
+ fp_coord is not None and
+ fp_coord.has_bounds() and
+ clim_season_coord is not None and
+ 'clim_season' in cube.cell_methods[-1].coord_names and
+ clim_season_coord.points[0] == 'son'):
+ # Climatological time mean - spanning years - son.
+ pp.lbtim.ia = 0
+ pp.lbtim.ib = 3
+ # TODO: explain why t1/t2 are overwritten with fixed season boundaries.
+ pp.t1 = time_coord.units.num2date(time_coord.bounds[0, 0])
+ pp.t2 = time_coord.units.num2date(time_coord.bounds[0, 1])
+ pp.t1 = netcdftime.datetime(pp.t1.year, 9, 1, 0, 0, 0)
+ pp.t2 = netcdftime.datetime(pp.t2.year, 12, 1, 0, 0, 0)
+ _conditional_warning(
+ time_coord.bounds[0, 0] != time_coord.units.date2num(pp.t1),
+ "modified t1 for climatological seasonal mean")
+ _conditional_warning(
+ time_coord.bounds[0, 1] != time_coord.units.date2num(pp.t2),
+ "modified t2 for climatological seasonal mean")
+ pp.lbft = fp_coord.units.convert(fp_coord.bounds[0, 1], 'hours')
+
+ return pp
+
+
+def _calendar_rules(cube, pp):
+ """
+ Rules for setting the calendar of the PP field.
+
+ Args:
+ cube: the cube being saved as a series of PP fields.
+ pp: the current PP field having save rules applied.
+
+ Returns:
+ The PP field with updated metadata.
+
+ """
+ time_coord = scalar_coord(cube, 'time')
+ if time_coord is not None:
+ if time_coord.units.calendar == '360_day':
+ pp.lbtim.ic = 2
+ elif time_coord.units.calendar == 'gregorian':
+ pp.lbtim.ic = 1
+ elif time_coord.units.calendar == '365_day':
+ pp.lbtim.ic = 4
+ return pp
+
+
+def _grid_and_pole_rules(cube, pp):
+ """
+ Rules for setting the horizontal grid and pole location of the PP field.
+
+ Args:
+ cube: the cube being saved as a series of PP fields.
+ pp: the current PP field having save rules applied.
+
+ Returns:
+ The PP field with updated metadata.
+
+ """
+ lon_coord = vector_coord(cube, 'longitude')
+ grid_lon_coord = vector_coord(cube, 'grid_longitude')
+ lat_coord = vector_coord(cube, 'latitude')
+ grid_lat_coord = vector_coord(cube, 'grid_latitude')
+
+ if lon_coord and not is_regular(lon_coord):
+ pp.bzx = 0
+ pp.bdx = 0
+ pp.lbnpt = lon_coord.shape[0]
+ pp.x = lon_coord.points
+ elif grid_lon_coord and not is_regular(grid_lon_coord):
+ pp.bzx = 0
+ pp.bdx = 0
+ pp.lbnpt = grid_lon_coord.shape[0]
+ pp.x = grid_lon_coord.points
+ elif lon_coord and is_regular(lon_coord):
+ pp.bzx = lon_coord.points[0] - regular_step(lon_coord)
+ pp.bdx = regular_step(lon_coord)
+ pp.lbnpt = len(lon_coord.points)
+ elif grid_lon_coord and is_regular(grid_lon_coord):
+ pp.bzx = grid_lon_coord.points[0] - regular_step(grid_lon_coord)
+ pp.bdx = regular_step(grid_lon_coord)
+ pp.lbnpt = len(grid_lon_coord.points)
+
+ if lat_coord and not is_regular(lat_coord):
+ pp.bzy = 0
+ pp.bdy = 0
+ pp.lbrow = lat_coord.shape[0]
+ pp.y = lat_coord.points
+ elif grid_lat_coord and not is_regular(grid_lat_coord):
+ pp.bzy = 0
+ pp.bdy = 0
+ pp.lbrow = grid_lat_coord.shape[0]
+ pp.y = grid_lat_coord.points
+ elif lat_coord and is_regular(lat_coord):
+ pp.bzy = lat_coord.points[0] - regular_step(lat_coord)
+ pp.bdy = regular_step(lat_coord)
+ pp.lbrow = len(lat_coord.points)
+ elif grid_lat_coord and is_regular(grid_lat_coord):
+ pp.bzy = grid_lat_coord.points[0] - regular_step(grid_lat_coord)
+ pp.bdy = regular_step(grid_lat_coord)
+ pp.lbrow = len(grid_lat_coord.points)
+
+ # Check if we have a rotated coord system.
+ if cube.coord_system("RotatedGeogCS") is not None:
+ pp.lbcode = int(pp.lbcode) + 100
+
+ # Check if we have a circular x-coord.
+ for lon_coord in (lon_coord, grid_lon_coord):
+ if lon_coord is not None:
+ if lon_coord.circular:
+ pp.lbhem = 0
+ else:
+ pp.lbhem = 3
+
+ return pp
+
+
+def _non_std_cross_section_rules(cube, pp):
+ """
+ Rules for applying non-standard cross-sections to the PP field.
+
+ Args:
+ cube: the cube being saved as a series of PP fields.
+ pp: the current PP field having save rules applied.
+
+ Returns:
+ The PP field with updated metadata.
+
+ """
+ # Define commonly-used coords.
+ air_pres_coord = vector_coord(cube, 'air_pressure')
+ depth_coord = vector_coord(cube, 'depth')
+ eta_coord = vector_coord(cube, 'eta')
+ lat_coord = vector_coord(cube, 'latitude')
+ time_coord = vector_coord(cube, 'time')
+
+ # Non-standard cross-section with bounds - x=latitude, y=air_pressure.
+ if (air_pres_coord is not None and
+ not air_pres_coord.circular and
+ air_pres_coord.has_bounds() and
+ lat_coord is not None and
+ not lat_coord.circular and
+ lat_coord.has_bounds()):
+ pp.lbcode = 10000 + int(100*10) + 1
+ pp.bgor = 0
+ pp.y = air_pres_coord.points
+ pp.y_lower_bound = air_pres_coord.bounds[:, 0]
+ pp.y_upper_bound = air_pres_coord.bounds[:, 1]
+ pp.x = lat_coord.points
+ pp.x_lower_bound = lat_coord.bounds[:, 0]
+ pp.x_upper_bound = lat_coord.bounds[:, 1]
+ pp.lbrow = air_pres_coord.shape[0]
+ pp.lbnpt = lat_coord.shape[0]
+ pp.bzx = pp.bzy = pp.bdx = pp.bdy = 0
+
+ # Non-standard cross-section with bounds - x=latitude, y=depth.
+ if (depth_coord is not None and
+ not depth_coord.circular and
+ depth_coord.has_bounds() and
+ lat_coord is not None and
+ not lat_coord.circular and
+ lat_coord.has_bounds()):
+ pp.lbcode = 10000 + int(100*10) + 4
+ pp.bgor = 0
+ pp.y = depth_coord.points
+ pp.y_lower_bound = depth_coord.bounds[:, 0]
+ pp.y_upper_bound = depth_coord.bounds[:, 1]
+ pp.x = lat_coord.points
+ pp.x_lower_bound = lat_coord.bounds[:, 0]
+ pp.x_upper_bound = lat_coord.bounds[:, 1]
+ pp.lbrow = depth_coord.shape[0]
+ pp.lbnpt = lat_coord.shape[0]
+ pp.bzx = pp.bzy = pp.bdx = pp.bdy = 0
+
+ # Non-standard cross-section with bounds - x=latitude, y=eta.
+ if (eta_coord is not None and
+ not eta_coord.circular and
+ eta_coord.has_bounds() and
+ lat_coord is not None and
+ not lat_coord.circular and
+ lat_coord.has_bounds()):
+ pp.lbcode = 10000 + int(100*10) + 3
+ pp.bgor = 0
+ pp.y = eta_coord.points
+ pp.y_lower_bound = eta_coord.bounds[:, 0]
+ pp.y_upper_bound = eta_coord.bounds[:, 1]
+ pp.x = lat_coord.points
+ pp.x_lower_bound = lat_coord.bounds[:, 0]
+ pp.x_upper_bound = lat_coord.bounds[:, 1]
+ pp.lbrow = eta_coord.shape[0]
+ pp.lbnpt = lat_coord.shape[0]
+ pp.bzx = pp.bzy = pp.bdx = pp.bdy = 0
+
+ # Non-standard cross-section with bounds - x=days (360 calendar), y=depth.
+ if (depth_coord is not None and
+ not depth_coord.circular and
+ depth_coord.has_bounds() and
+ time_coord is not None and
+ not time_coord.circular and
+ time_coord.has_bounds()):
+ pp.lbcode = 10000 + int(100*23) + 4
+ pp.bgor = 0
+ pp.y = depth_coord.points
+ pp.y_lower_bound = depth_coord.bounds[:, 0]
+ pp.y_upper_bound = depth_coord.bounds[:, 1]
+ pp.x = time_coord.points
+ pp.x_lower_bound = time_coord.bounds[:, 0]
+ pp.x_upper_bound = time_coord.bounds[:, 1]
+ pp.lbrow = depth_coord.shape[0]
+ pp.lbnpt = time_coord.shape[0]
+ pp.bzx = pp.bzy = pp.bdx = pp.bdy = 0
+
+ # Non-standard cross-section with bounds -
+ # x=days (360 calendar), y=air_pressure.
+ if (air_pres_coord is not None and
+ not air_pres_coord.circular and
+ air_pres_coord.has_bounds() and
+ time_coord is not None and
+ not time_coord.circular and
+ time_coord.has_bounds()):
+ pp.lbcode = 10000 + int(100*23) + 1
+ pp.bgor = 0
+ pp.y = air_pres_coord.points
+ pp.y_lower_bound = air_pres_coord.bounds[:, 0]
+ pp.y_upper_bound = air_pres_coord.bounds[:, 1]
+ pp.x = time_coord.points
+ pp.x_lower_bound = time_coord.bounds[:, 0]
+ pp.x_upper_bound = time_coord.bounds[:, 1]
+ pp.lbrow = air_pres_coord.shape[0]
+ pp.lbnpt = time_coord.shape[0]
+ pp.bzx = pp.bzy = pp.bdx = pp.bdy = 0
+
+ return pp
+
+
+def _lbproc_rules(cube, pp):
+ """
+ Rules for setting the processing code (LBPROC) of the PP field.
+
+ Note: `pp.lbproc` is reset to 0 at the start of these rules.
+
+ Args:
+ cube: the cube being saved as a series of PP fields.
+ pp: the current PP field having save rules applied.
+
+ Returns:
+ The PP field with updated metadata.
+
+ """
+ # Basic setting (this may be overridden by subsequent rules).
+ pp.lbproc = 0
+
+ if cube.attributes.get("ukmo__process_flags", None):
+ pp.lbproc += sum([LBPROC_MAP[name]
+ for name in cube.attributes["ukmo__process_flags"]])
+
+ # Zonal-mean: look for a CellMethod which is a "mean" over "longitude" or
+ # "grid_longitude".
+ if (scalar_cell_method(cube, 'mean', 'longitude') is not None or
+ scalar_cell_method(cube, 'mean', 'grid_longitude') is not None):
+ pp.lbproc += 64
+
+ # Time-mean: look for a CellMethod which is a "mean" over "time".
+ if scalar_cell_method(cube, 'mean', 'time') is not None:
+ pp.lbproc += 128
+
+ # Time-minimum: look for a CellMethod which is a "minimum" over "time".
+ if scalar_cell_method(cube, 'minimum', 'time') is not None:
+ pp.lbproc += 4096
+
+ # Time-maximum: look for a CellMethod which is a "maximum" over "time".
+ if scalar_cell_method(cube, 'maximum', 'time') is not None:
+ pp.lbproc += 8192
+
+ return pp
+
+
+def _vertical_rules(cube, pp):
+ """
+ Rules for setting vertical levels for the PP field.
+
+ Args:
+ cube: the cube being saved as a series of PP fields.
+ pp: the current PP field having save rules applied.
+
+ Returns:
+ The PP field with updated metadata.
+
+ """
+ # Define commonly-used coords.
+ air_pres_coord = scalar_coord(cube, 'air_pressure')
+ apt_coord = scalar_coord(cube, 'air_potential_temperature')
+ depth_coord = scalar_coord(cube, 'depth')
+ height_coord = scalar_coord(cube, 'height')
+ level_height_coord = scalar_coord(cube, 'level_height')
+ mln_coord = scalar_coord(cube, 'model_level_number')
+ pressure_coord = scalar_coord(cube, 'pressure')
+ pseudo_level_coord = scalar_coord(cube, 'pseudo_level')
+ sigma_coord = scalar_coord(cube, 'sigma')
+ soil_mln_coord = scalar_coord(cube, 'soil_model_level_number')
+
+ # Define commonly-used aux factories.
+ try:
+ height_factory = aux_factory(cube, HybridHeightFactory)
+ except ValueError:
+ height_factory = None
+ try:
+ pressure_factory = aux_factory(cube, HybridPressureFactory)
+ except ValueError:
+ pressure_factory = None
+
+ # Set `lbuser[5]`.
+ if (pseudo_level_coord is not None and
+ not pseudo_level_coord.bounds):
+ pp.lbuser[4] = pseudo_level_coord.points[0]
+
+ # Single height level.
+ if (height_coord is not None and
+ not height_coord.bounds and
+ height_coord.points[0] == 1.5 and
+ cube.name() == 'air_temperature'):
+ pp.lbvc = 129
+ pp.blev = -1
+
+ if pp.lbvc == 0 and height_coord is not None and not height_coord.bounds:
+ pp.lbvc = 1
+ pp.blev = cube.coord('height').points[0]
+
+ # Single air_pressure level.
+ if air_pres_coord is not None and not air_pres_coord.bounds:
+ pp.lbvc = 8
+ pp.blev = air_pres_coord.points[0]
+
+ # Single pressure level.
+ if pressure_coord is not None and not pressure_coord.bounds:
+ pp.lbvc = 8
+ pp.blev = pressure_coord.points[0]
+
+ # Single depth level (non cross-section).
+ if (mln_coord is not None and
+ not mln_coord.bounds and
+ depth_coord is not None and
+ not depth_coord.bounds):
+ pp.lbvc = 2
+ pp.lblev = mln_coord.points[0]
+ pp.blev = depth_coord.points[0]
+
+ # Single depth level (Non-dimensional soil model level).
+ if (soil_mln_coord is not None and
+ not soil_mln_coord.has_bounds() and
+ air_pres_coord is None and
+ depth_coord is None and
+ height_coord is None and
+ pressure_coord is None and
+ cube.standard_name is not None and
+ 'soil' in cube.standard_name):
+ pp.lbvc = 6
+ pp.lblev = soil_mln_coord.points[0]
+ pp.blev = pp.lblev
+ pp.brsvd[0] = 0
+ pp.brlev = 0
+
+ # Single depth level (soil depth).
+ if (depth_coord is not None and
+ depth_coord.has_bounds() and
+ air_pres_coord is None and
+ soil_mln_coord is None and
+ mln_coord is None and
+ height_coord is None and
+ pressure_coord is None and
+ cube.standard_name is not None and
+ 'soil' in cube.standard_name):
+ pp.lbvc = 6
+ pp.blev = depth_coord.points[0]
+ pp.brsvd[0] = depth_coord.bounds[0, 0]
+ pp.brlev = depth_coord.bounds[0, 1]
+
+ # Single potential-temperature level.
+ if (apt_coord is not None and
+ not apt_coord.bounds and
+ air_pres_coord is None and
+ depth_coord is None and
+ height_coord is None and
+ pressure_coord is None and
+ mln_coord is None):
+ pp.lbvc = 19
+ pp.lblev = apt_coord.points[0]
+ pp.blev = apt_coord.points[0]
+
+ # Single hybrid_height level
+ # (without aux factory e.g. due to missing orography).
+ if (not has_aux_factory(cube, HybridHeightFactory) and
+ mln_coord is not None and
+ mln_coord.bounds is None and
+ level_height_coord is not None and
+ level_height_coord.bounds is not None and
+ sigma_coord is not None and
+ sigma_coord.bounds is not None):
+ pp.lbvc = 65
+ pp.lblev = mln_coord.points[0]
+ pp.blev = level_height_coord.points[0]
+ pp.brlev = level_height_coord.bounds[0, 0]
+ pp.brsvd[0] = level_height_coord.bounds[0, 1]
+ pp.bhlev = sigma_coord.points[0]
+ pp.bhrlev = sigma_coord.bounds[0, 0]
+ pp.brsvd[1] = sigma_coord.bounds[0, 1]
+
+ # Single hybrid_height level (with aux factory).
+ if (has_aux_factory(cube, HybridHeightFactory) and
+ mln_coord is not None and
+ mln_coord.bounds is None and
+ height_factory.dependencies['delta'] is not None and
+ height_factory.dependencies['delta'].bounds is not None and
+ height_factory.dependencies['sigma'] is not None and
+ height_factory.dependencies['sigma'].bounds is not None):
+ pp.lbvc = 65
+ pp.lblev = mln_coord.points[0]
+ pp.blev = height_factory.dependencies['delta'].points[0]
+ pp.brlev = height_factory.dependencies['delta'].bounds[0, 0]
+ pp.brsvd[0] = height_factory.dependencies['delta'].bounds[0, 1]
+ pp.bhlev = height_factory.dependencies['sigma'].points[0]
+ pp.bhrlev = height_factory.dependencies['sigma'].bounds[0, 0]
+ pp.brsvd[1] = height_factory.dependencies['sigma'].bounds[0, 1]
+
+ # Single hybrid pressure level.
+ if (has_aux_factory(cube, HybridPressureFactory) and
+ mln_coord is not None and
+ mln_coord.bounds is None and
+ pressure_factory.dependencies['delta'] is not None and
+ pressure_factory.dependencies['delta'].bounds is not None and
+ pressure_factory.dependencies['sigma'] is not None and
+ pressure_factory.dependencies['sigma'].bounds is not None):
+ pp.lbvc = 9
+ pp.lblev = mln_coord.points[0]
+ pp.blev = pressure_factory.dependencies['sigma'].points[0]
+ pp.brlev = pressure_factory.dependencies['sigma'].bounds[0, 0]
+ pp.brsvd[0] = pressure_factory.dependencies['sigma'].bounds[0, 1]
+ pp.bhlev = pressure_factory.dependencies['delta'].points[0]
+ pp.bhrlev = pressure_factory.dependencies['delta'].bounds[0, 0]
+ pp.brsvd[1] = pressure_factory.dependencies['delta'].bounds[0, 1]
+
+ return pp
+
+
+def _mdi_rules(cube, pp):
+ """
+ Rules for setting the MDI (Missing Data Indicator) value of the PP field.
+
+ Args:
+ cube: the cube being saved as a series of PP fields.
+ pp: the current PP field having save rules applied.
+
+ Returns:
+ The PP field with updated metadata.
+
+ """
+ if cube.fill_value is not None:
+ pp.bmdi = cube.fill_value
+ else:
+ pp.bmdi = -1e30
+
+ return pp
+
+
+def _all_other_rules(cube, pp):
+ """
+ Rules for setting the field code (LBFC) of the PP field.
+
+ Args:
+ cube: the cube being saved as a series of PP fields.
+ pp: the current PP field having save rules applied.
+
+ Returns:
+ The PP field with updated metadata.
+
+ """
+ # "CFNAME mega-rule."
+ check_items = (cube.standard_name, cube.long_name, str(cube.units))
+ if check_items in CF_TO_LBFC:
+ pp.lbfc = CF_TO_LBFC[check_items]
+
+ # Set STASH code.
+ if ('STASH' in cube.attributes and
+ str(cube.attributes['STASH']) in STASH_TRANS):
+ pp.lbfc = STASH_TRANS[str(cube.attributes['STASH'])].field_code
+
+ return pp
+
+
+def verify(cube, field):
+ # Rules functions.
+ field = _basic_coord_system_rules(cube, field)
+ field = _um_version_rules(cube, field)
+ field = _stash_rules(cube, field)
+ field = _general_time_rules(cube, field)
+ field = _calendar_rules(cube, field)
+ field = _grid_and_pole_rules(cube, field)
+ field = _non_std_cross_section_rules(cube, field)
+ field = _lbproc_rules(cube, field)
+ field = _vertical_rules(cube, field)
+ field = _mdi_rules(cube, field)
+ field = _all_other_rules(cube, field)
+
+ return field
+
+
+# Helper functions used when running the rules.
+
+def _conditional_warning(condition, warning):
+ if condition:
+ warnings.warn(warning)
diff --git a/lib/iris/fileformats/rules.py b/lib/iris/fileformats/rules.py
index 5b2dbc7603..d156fa8b9e 100644
--- a/lib/iris/fileformats/rules.py
+++ b/lib/iris/fileformats/rules.py
@@ -911,4 +911,3 @@ def loadcubes_user_callback_wrapper(cube, field, filename):
converter=loader.converter,
user_callback_wrapper=loadcubes_user_callback_wrapper):
yield cube
-
diff --git a/lib/iris/tests/integration/test_pp.py b/lib/iris/tests/integration/test_pp.py
index 235205d591..e3238fccd3 100644
--- a/lib/iris/tests/integration/test_pp.py
+++ b/lib/iris/tests/integration/test_pp.py
@@ -32,6 +32,7 @@
from iris.cube import Cube
import iris.fileformats.pp
import iris.fileformats.pp_rules
+from iris.fileformats.pp_save_rules import verify
from iris.exceptions import IgnoreCubeException
from iris.tests import mock
from iris.fileformats.pp import load_pairs_from_fields
@@ -73,8 +74,7 @@ def test_soil_level_round_trip(self):
field.lbvc = 0
field.brsvd = [None] * 4
field.brlev = None
- iris.fileformats.pp._ensure_save_rules_loaded()
- iris.fileformats.pp._save_rules.verify(cube, field)
+ field = verify(cube, field)
# Check the vertical coordinate is as originally specified.
self.assertEqual(field.lbvc, 6)
@@ -111,8 +111,7 @@ def test_soil_depth_round_trip(self):
field.lbvc = 0
field.brlev = None
field.brsvd = [None] * 4
- iris.fileformats.pp._ensure_save_rules_loaded()
- iris.fileformats.pp._save_rules.verify(cube, field)
+ field = verify(cube, field)
# Check the vertical coordinate is as originally specified.
self.assertEqual(field.lbvc, 6)
@@ -144,8 +143,7 @@ def test_potential_temperature_level_round_trip(self):
field = iris.fileformats.pp.PPField3()
field.lbfc = 0
field.lbvc = 0
- iris.fileformats.pp._ensure_save_rules_loaded()
- iris.fileformats.pp._save_rules.verify(cube, field)
+ field = verify(cube, field)
# Check the vertical coordinate is as originally specified.
self.assertEqual(field.lbvc, 19)
@@ -214,15 +212,14 @@ def field_with_data(scale=1):
pressure_field.lbvc = 0
pressure_field.brsvd = [None, None]
pressure_field.lbuser = [None] * 7
- iris.fileformats.pp._ensure_save_rules_loaded()
- iris.fileformats.pp._save_rules.verify(pressure_cube, pressure_field)
+ pressure_field = verify(pressure_cube, pressure_field)
data_field = iris.fileformats.pp.PPField3()
data_field.lbfc = 0
data_field.lbvc = 0
data_field.brsvd = [None, None]
data_field.lbuser = [None] * 7
- iris.fileformats.pp._save_rules.verify(data_cube, data_field)
+ data_field = verify(data_cube, data_field)
# The reference surface field should have STASH=409
self.assertArrayEqual(pressure_field.lbuser,
@@ -306,8 +303,7 @@ def test_hybrid_height_with_non_standard_coords(self):
field.lbvc = 0
field.brsvd = [None, None]
field.lbuser = [None] * 7
- iris.fileformats.pp._ensure_save_rules_loaded()
- iris.fileformats.pp._save_rules.verify(cube, field)
+ field = verify(cube, field)
self.assertEqual(field.blev, delta)
self.assertEqual(field.brlev, delta_lower)
@@ -343,8 +339,7 @@ def test_hybrid_pressure_with_non_standard_coords(self):
field.lbvc = 0
field.brsvd = [None, None]
field.lbuser = [None] * 7
- iris.fileformats.pp._ensure_save_rules_loaded()
- iris.fileformats.pp._save_rules.verify(cube, field)
+ field = verify(cube, field)
self.assertEqual(field.bhlev, delta)
self.assertEqual(field.bhrlev, delta_lower)
@@ -409,8 +404,7 @@ def field_with_data(scale=1):
data_field.lbvc = 0
data_field.brsvd = [None, None]
data_field.lbuser = [None] * 7
- iris.fileformats.pp._ensure_save_rules_loaded()
- iris.fileformats.pp._save_rules.verify(data_cube, data_field)
+ data_field = verify(data_cube, data_field)
# Check the data field has the vertical coordinate as originally
# specified.
@@ -446,8 +440,7 @@ def convert_cube_to_field(self, cube):
field.lbfc = 0
field.lbvc = 0
field.lbtim = 0
- iris.fileformats.pp._ensure_save_rules_loaded()
- iris.fileformats.pp._save_rules.verify(cube, field)
+ field = verify(cube, field)
return field
def test_time_mean_from_forecast_period(self):
@@ -633,9 +626,7 @@ def create_cube(self, longitude_coord='longitude'):
def convert_cube_to_field(self, cube):
field = iris.fileformats.pp.PPField3()
field.lbvc = 0
- iris.fileformats.pp._ensure_save_rules_loaded()
- iris.fileformats.pp._save_rules.verify(cube, field)
- return field
+ return verify(cube, field)
def test_time_mean_only(self):
cube = self.create_cube()
diff --git a/lib/iris/tests/test_cube_to_pp.py b/lib/iris/tests/test_cube_to_pp.py
index 6ee905a1ff..fa78f95166 100644
--- a/lib/iris/tests/test_cube_to_pp.py
+++ b/lib/iris/tests/test_cube_to_pp.py
@@ -184,8 +184,7 @@ def test_365_calendar_export(self):
# Add an extra "fill_value" property, as used by the save rules.
cube.fill_value = None
pp_field = mock.MagicMock(spec=PPField3)
- iris.fileformats.pp._ensure_save_rules_loaded()
- iris.fileformats.pp._save_rules.verify(cube, pp_field)
+ iris.fileformats.pp_save_rules.verify(cube, pp_field)
self.assertEqual(pp_field.lbtim.ic, 4)
diff --git a/lib/iris/tests/test_pp_to_cube.py b/lib/iris/tests/test_pp_to_cube.py
index 9c6b5ec9de..e4a74da282 100644
--- a/lib/iris/tests/test_pp_to_cube.py
+++ b/lib/iris/tests/test_pp_to_cube.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2010 - 2015, Met Office
+# (C) British Crown Copyright 2010 - 2017, Met Office
#
# This file is part of Iris.
#
diff --git a/lib/iris/tests/unit/fileformats/pp/test_save.py b/lib/iris/tests/unit/fileformats/pp/test_save.py
index 4246834d9d..0784132915 100644
--- a/lib/iris/tests/unit/fileformats/pp/test_save.py
+++ b/lib/iris/tests/unit/fileformats/pp/test_save.py
@@ -1,4 +1,4 @@
-# (C) British Crown Copyright 2014 - 2016, Met Office
+# (C) British Crown Copyright 2014 - 2017, Met Office
#
# This file is part of Iris.
#
@@ -26,6 +26,7 @@
from iris.coords import DimCoord, CellMethod
from iris.fileformats._ff_cross_references import STASH_TRANS
import iris.fileformats.pp as pp
+from iris.fileformats.pp_save_rules import _lbproc_rules
from iris.tests import mock
import iris.tests.stock as stock
@@ -162,26 +163,38 @@ def test_um_version(self):
class Test_Save__LbprocProduction(tests.IrisTest):
+ # This test class is a little different to the others.
+ # If it called `pp.save` via `_pp_save_ppfield_values` it would run
+ # `pp_save_rules.verify` and run all the save rules. As this class uses
+ # a 3D cube with a time coord it would run the time rules, which would fail
+ # because the mock object does not set up the `pp.lbtim` attribute
+ # correctly (i.e. as a `SplittableInt` object).
+ # To work around this we call the lbproc rules directly here.
+
def setUp(self):
self.cube = stock.realistic_3d()
+ self.pp_field = mock.MagicMock(spec=pp.PPField3)
+ self.pp_field.HEADER_DEFN = pp.PPField3.HEADER_DEFN
+ self.patch('iris.fileformats.pp.PPField3',
+ return_value=self.pp_field)
def test_no_cell_methods(self):
- lbproc = _pp_save_ppfield_values(self.cube).lbproc
+ lbproc = _lbproc_rules(self.cube, self.pp_field).lbproc
self.assertEqual(lbproc, 0)
def test_mean(self):
self.cube.cell_methods = (CellMethod('mean', 'time', '1 hour'),)
- lbproc = _pp_save_ppfield_values(self.cube).lbproc
+ lbproc = _lbproc_rules(self.cube, self.pp_field).lbproc
self.assertEqual(lbproc, 128)
def test_minimum(self):
self.cube.cell_methods = (CellMethod('minimum', 'time', '1 hour'),)
- lbproc = _pp_save_ppfield_values(self.cube).lbproc
+ lbproc = _lbproc_rules(self.cube, self.pp_field).lbproc
self.assertEqual(lbproc, 4096)
def test_maximum(self):
self.cube.cell_methods = (CellMethod('maximum', 'time', '1 hour'),)
- lbproc = _pp_save_ppfield_values(self.cube).lbproc
+ lbproc = _lbproc_rules(self.cube, self.pp_field).lbproc
self.assertEqual(lbproc, 8192)