2 changes: 2 additions & 0 deletions .flake8
@@ -4,6 +4,8 @@
# https://flake8.readthedocs.io/en/latest/user/error-codes.html
# https://pycodestyle.readthedocs.io/en/latest/intro.html#error-codes

max-line-length = 80
select = C,E,F,W,B,B950
ignore =
# E203: whitespace before ':'
E203,
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -13,7 +13,7 @@ repos:
# Don't commit to master branch.
- id: no-commit-to-branch
- repo: https://github.com/psf/black
rev: '19.10b0'
rev: '20.8b1'
hooks:
- id: black
# Force black to run on whole repo, using settings from pyproject.toml
4 changes: 3 additions & 1 deletion docs/iris/gallery_code/general/plot_custom_file_loading.py
@@ -314,7 +314,9 @@ def main():
# draw contour levels for the data (the top level is just a catch-all)
levels = (0.0002, 0.002, 0.004, 1e10)
cs = iplt.contourf(
cube, levels=levels, colors=("#80ffff", "#939598", "#e00404"),
cube,
levels=levels,
colors=("#80ffff", "#939598", "#e00404"),
)

# draw a black outline at the lowest contour to highlight affected areas
3 changes: 3 additions & 0 deletions docs/iris/src/whatsnew/latest.rst
@@ -260,6 +260,9 @@ This document explains the changes made to Iris for this release
`matplotlib.rcdefaults <https://matplotlib.org/3.1.1/api/matplotlib_configuration_api.html?highlight=rcdefaults#matplotlib.rcdefaults>`_,
instead the tests will **always** use ``agg``. (:pull:`3846`)

* `@bjlittle`_ migrated the `black`_ support from ``19.10b0`` to ``20.8b1``.
(:pull:`3866`)


.. _Read the Docs: https://scitools-iris.readthedocs.io/en/latest/
.. _Matplotlib: https://matplotlib.org/
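Most of the hunks that follow are mechanical fallout from the black upgrade recorded in the whatsnew entry above: 20.8b1 introduces the "magic trailing comma", so a pre-existing trailing comma now forces the surrounding call or literal to stay exploded with one element per line. Accordingly, trailing commas are dropped wherever a call should collapse back onto a single line, and kept wherever the multi-line layout is wanted. A minimal sketch of the two outcomes (illustrative only, not code from this repository):

```python
from collections import namedtuple

# Illustrative sketch of black 20.8b1's "magic trailing comma" behaviour;
# neither definition below comes from the Iris codebase.

# A pre-existing trailing comma is significant: black keeps the bracketed
# content exploded, one element per line.
Exploded = namedtuple(
    "Exploded",
    ["defn", "dims"],
)

# Without the trailing comma, black collapses the call onto a single line
# whenever it fits within the configured line length.
Collapsed = namedtuple("Collapsed", ["defn", "dims"])
```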
2 changes: 1 addition & 1 deletion lib/iris/_concatenate.py
@@ -161,7 +161,7 @@ def name(self):
return self.defn.name()


class _OtherMetaData(namedtuple("OtherMetaData", ["defn", "dims"],)):
class _OtherMetaData(namedtuple("OtherMetaData", ["defn", "dims"])):
"""
Container for the metadata that defines a cell measure or ancillary
variable.
6 changes: 4 additions & 2 deletions lib/iris/analysis/_area_weighted.py
@@ -55,8 +55,10 @@ def __init__(self, src_grid_cube, target_grid_cube, mdtol=1):
self._mdtol = mdtol

# Store regridding information
_regrid_info = eregrid._regrid_area_weighted_rectilinear_src_and_grid__prepare(
src_grid_cube, target_grid_cube
_regrid_info = (
eregrid._regrid_area_weighted_rectilinear_src_and_grid__prepare(
src_grid_cube, target_grid_cube
)
)
(
src_x,
12 changes: 8 additions & 4 deletions lib/iris/analysis/_regrid.py
@@ -480,11 +480,15 @@ def __call__(self, src):
for slice_cube in src.slices(sx):
if self._regrid_info is None:
# Calculate the basic regrid info just once.
self._regrid_info = _regrid_weighted_curvilinear_to_rectilinear__prepare(
slice_cube, self.weights, self._target_cube
self._regrid_info = (
_regrid_weighted_curvilinear_to_rectilinear__prepare(
slice_cube, self.weights, self._target_cube
)
)
slice_result = (
_regrid_weighted_curvilinear_to_rectilinear__perform(
slice_cube, self._regrid_info
)
slice_result = _regrid_weighted_curvilinear_to_rectilinear__perform(
slice_cube, self._regrid_info
)
result_slices.append(slice_result)
result = result_slices.merge_cube()
12 changes: 8 additions & 4 deletions lib/iris/analysis/calculus.py
@@ -623,10 +623,14 @@ def curl(i_cube, j_cube, k_cube=None):
# (d/dtheta (i_cube * sin(lat)) - d_j_cube_dphi)
# phi_cmpt = 1/r * ( d/dr (r * j_cube) - d_k_cube_dtheta)
# theta_cmpt = 1/r * ( 1/cos(lat) * d_k_cube_dphi - d/dr (r * i_cube)
if y_coord.name() not in [
"latitude",
"grid_latitude",
] or x_coord.name() not in ["longitude", "grid_longitude"]:
if (
y_coord.name()
not in [
"latitude",
"grid_latitude",
]
or x_coord.name() not in ["longitude", "grid_longitude"]
):
raise ValueError(
"Expecting latitude as the y coord and "
"longitude as the x coord for spherical curl."
1 change: 0 additions & 1 deletion lib/iris/common/mixin.py
@@ -7,7 +7,6 @@

from collections.abc import Mapping
from functools import wraps
import re

import cf_units

6 changes: 4 additions & 2 deletions lib/iris/common/resolve.py
@@ -37,7 +37,8 @@
)

_CategoryItems = namedtuple(
"CategoryItems", ["items_dim", "items_aux", "items_scalar"],
"CategoryItems",
["items_dim", "items_aux", "items_scalar"],
)

_DimCoverage = namedtuple(
@@ -50,7 +51,8 @@
_PreparedFactory = namedtuple("PreparedFactory", ["container", "dependencies"])

_PreparedItem = namedtuple(
"PreparedItem", ["metadata", "points", "bounds", "dims", "container"],
"PreparedItem",
["metadata", "points", "bounds", "dims", "container"],
)

_PreparedMetadata = namedtuple("PreparedMetadata", ["combined", "src", "tgt"])
8 changes: 4 additions & 4 deletions lib/iris/cube.py
@@ -3647,8 +3647,8 @@ def __eq__(self, other):

# having checked the metadata, now check the coordinates
if result:
coord_compares = iris.analysis._dimensional_metadata_comparison(
self, other
coord_compares = (
iris.analysis._dimensional_metadata_comparison(self, other)
)
# if there are any coordinates which are not equal
result = not (
@@ -3658,7 +3658,7 @@ def __eq__(self, other):

if result:
cm_compares = iris.analysis._dimensional_metadata_comparison(
self, other, object_get=Cube.cell_measures,
self, other, object_get=Cube.cell_measures
)
# if there are any cell measures which are not equal
result = not (
@@ -3668,7 +3668,7 @@ def __eq__(self, other):

if result:
av_compares = iris.analysis._dimensional_metadata_comparison(
self, other, object_get=Cube.ancillary_variables,
self, other, object_get=Cube.ancillary_variables
)
# if there are any ancillary variables which are not equal
result = not (
32 changes: 16 additions & 16 deletions lib/iris/fileformats/_ff.py
@@ -42,23 +42,23 @@
("model_version", (12,)),
("obs_file_type", (14,)),
("last_fieldop_type", (15,)),
("first_validity_time", (21, 22, 23, 24, 25, 26, 27,)),
("last_validity_time", (28, 29, 30, 31, 32, 33, 34,)),
("misc_validity_time", (35, 36, 37, 38, 39, 40, 41,)),
("integer_constants", (100, 101,)),
("real_constants", (105, 106,)),
("level_dependent_constants", (110, 111, 112,)),
("row_dependent_constants", (115, 116, 117,)),
("column_dependent_constants", (120, 121, 122,)),
("fields_of_constants", (125, 126, 127,)),
("extra_constants", (130, 131,)),
("temp_historyfile", (135, 136,)),
("compressed_field_index1", (140, 141,)),
("compressed_field_index2", (142, 143,)),
("compressed_field_index3", (144, 145,)),
("lookup_table", (150, 151, 152,)),
("first_validity_time", (21, 22, 23, 24, 25, 26, 27)),
("last_validity_time", (28, 29, 30, 31, 32, 33, 34)),
("misc_validity_time", (35, 36, 37, 38, 39, 40, 41)),
("integer_constants", (100, 101)),
("real_constants", (105, 106)),
("level_dependent_constants", (110, 111, 112)),
("row_dependent_constants", (115, 116, 117)),
("column_dependent_constants", (120, 121, 122)),
("fields_of_constants", (125, 126, 127)),
("extra_constants", (130, 131)),
("temp_historyfile", (135, 136)),
("compressed_field_index1", (140, 141)),
("compressed_field_index2", (142, 143)),
("compressed_field_index3", (144, 145)),
("lookup_table", (150, 151, 152)),
("total_prognostic_fields", (153,)),
("data", (160, 161, 162,)),
("data", (160, 161, 162)),
]

# Offset value to convert from UM_FIXED_LENGTH_HEADER positions to
2 changes: 1 addition & 1 deletion lib/iris/fileformats/cf.py
@@ -45,7 +45,7 @@
# NetCDF variable attributes handled by the netCDF4 module and
# therefore automatically classed as "used" attributes.
_CF_ATTRS_IGNORE = set(
["_FillValue", "add_offset", "missing_value", "scale_factor",]
["_FillValue", "add_offset", "missing_value", "scale_factor"]
)

#: Supported dimensionless vertical coordinate reference surface/phemomenon
2 changes: 1 addition & 1 deletion lib/iris/fileformats/name_loaders.py
@@ -489,7 +489,7 @@ def _generate_cubes(
circular = False
if coord.name == "flight_level":
icoord = DimCoord(
points=pts, units=coord_units, long_name=long_name,
points=pts, units=coord_units, long_name=long_name
)
else:
icoord = DimCoord(
2 changes: 1 addition & 1 deletion lib/iris/fileformats/netcdf.py
@@ -1066,7 +1066,7 @@ def write(
`chunksizes` and `endian` keywords are silently ignored for netCDF
3 files that do not use HDF5.

"""
"""
if unlimited_dimensions is None:
unlimited_dimensions = []

6 changes: 3 additions & 3 deletions lib/iris/fileformats/nimrod_load_rules.py
@@ -417,7 +417,7 @@ def coord_system(field, handle_metadata_errors):
"Plotting and reprojection may be impaired."
)
coord_sys = iris.coord_systems.TransverseMercator(
*crs_args, iris.coord_systems.GeogCS(**ellipsoid),
*crs_args, iris.coord_systems.GeogCS(**ellipsoid)
)
elif field.horizontal_grid_type == 1:
coord_sys = iris.coord_systems.GeogCS(**ellipsoid)
@@ -456,7 +456,7 @@ def horizontal_grid(cube, field, handle_metadata_errors):
dtype=np.float32,
)
x_coord = DimCoord(
points, standard_name=x_coord_name, units=units_name, coord_system=crs,
points, standard_name=x_coord_name, units=units_name, coord_system=crs
)
cube.add_dim_coord(x_coord, 1)
points = np.linspace(
@@ -467,7 +467,7 @@ def horizontal_grid(cube, field, handle_metadata_errors):
dtype=np.float32,
)
y_coord = DimCoord(
points, standard_name=y_coord_name, units=units_name, coord_system=crs,
points, standard_name=y_coord_name, units=units_name, coord_system=crs
)
cube.add_dim_coord(y_coord, 0)

12 changes: 6 additions & 6 deletions lib/iris/fileformats/pp.py
@@ -105,10 +105,10 @@
("lbproj", (31,)),
("lbtyp", (32,)),
("lblev", (33,)),
("lbrsvd", (34, 35, 36, 37,)),
("lbrsvd", (34, 35, 36, 37)),
("lbsrce", (38,)),
("lbuser", (39, 40, 41, 42, 43, 44, 45,)),
("brsvd", (46, 47, 48, 49,)),
("lbuser", (39, 40, 41, 42, 43, 44, 45)),
("brsvd", (46, 47, 48, 49)),
("bdatum", (50,)),
("bacc", (51,)),
("blev", (52,)),
@@ -163,10 +163,10 @@
("lbproj", (31,)),
("lbtyp", (32,)),
("lblev", (33,)),
("lbrsvd", (34, 35, 36, 37,)),
("lbrsvd", (34, 35, 36, 37)),
("lbsrce", (38,)),
("lbuser", (39, 40, 41, 42, 43, 44, 45,)),
("brsvd", (46, 47, 48, 49,)),
("lbuser", (39, 40, 41, 42, 43, 44, 45)),
("brsvd", (46, 47, 48, 49)),
("bdatum", (50,)),
("bacc", (51,)),
("blev", (52,)),
2 changes: 1 addition & 1 deletion lib/iris/plot.py
@@ -351,7 +351,7 @@ def _check_bounds_contiguity_and_mask(coord, data, atol=None, rtol=None):
)

not_masked_at_discontiguity_along_y = np.any(
np.logical_and(mask_invert[:-1,], diffs_along_y)
np.logical_and(mask_invert[:-1], diffs_along_y)
)

not_masked_at_discontiguity = (
4 changes: 2 additions & 2 deletions lib/iris/tests/test_cdm.py
@@ -1135,7 +1135,7 @@ def assert_is_not_lazy(self, cube):
self.assertFalse(cube.has_lazy_data())

def test_slices(self):
lat_cube = next(self.cube.slices(["grid_latitude",]))
lat_cube = next(self.cube.slices(["grid_latitude"]))
self.assert_is_lazy(lat_cube)
self.assert_is_lazy(self.cube)

@@ -1459,7 +1459,7 @@ def test_coord_conversion(self):

# List of string and unicode
self.assertEqual(
len(cube._as_list_of_coords(["grid_longitude", "grid_latitude"],)),
len(cube._as_list_of_coords(["grid_longitude", "grid_latitude"])),
2,
)

4 changes: 2 additions & 2 deletions lib/iris/tests/test_merge.py
@@ -1038,11 +1038,11 @@ def _makecube(self, y, cm=False, av=False):
cube.add_aux_coord(iris.coords.DimCoord(y, long_name="y"))
if cm:
cube.add_cell_measure(
iris.coords.CellMeasure([1, 1], long_name="foo"), 0,
iris.coords.CellMeasure([1, 1], long_name="foo"), 0
)
if av:
cube.add_ancillary_variable(
iris.coords.AncillaryVariable([1, 1], long_name="bar"), 0,
iris.coords.AncillaryVariable([1, 1], long_name="bar"), 0
)
return cube

10 changes: 5 additions & 5 deletions lib/iris/tests/test_netcdf.py
@@ -425,25 +425,25 @@ def test_deferred_loading(self):

# Consecutive tuple index on same dimension.
self.assertCML(
cube[(0, 8, 4, 2, 14, 12),],
cube[((0, 8, 4, 2, 14, 12),)],
("netcdf", "netcdf_deferred_tuple_0.cml"),
)
self.assertCML(
cube[(0, 8, 4, 2, 14, 12),][(0, 2, 4, 1),],
cube[((0, 8, 4, 2, 14, 12),)][((0, 2, 4, 1),)],
("netcdf", "netcdf_deferred_tuple_1.cml"),
)
subcube = cube[(0, 8, 4, 2, 14, 12),][(0, 2, 4, 1),][
subcube = cube[((0, 8, 4, 2, 14, 12),)][((0, 2, 4, 1),)][
(1, 3),
]
self.assertCML(subcube, ("netcdf", "netcdf_deferred_tuple_2.cml"))

# Consecutive mixture on same dimension.
self.assertCML(
cube[0:20:2][(9, 5, 8, 0),][3],
cube[0:20:2][((9, 5, 8, 0),)][3],
("netcdf", "netcdf_deferred_mix_0.cml"),
)
self.assertCML(
cube[(2, 7, 3, 4, 5, 0, 9, 10),][2:6][3],
cube[((2, 7, 3, 4, 5, 0, 9, 10),)][2:6][3],
("netcdf", "netcdf_deferred_mix_0.cml"),
)
self.assertCML(
4 changes: 2 additions & 2 deletions lib/iris/tests/test_nimrod.py
@@ -73,7 +73,7 @@ def test_huge_field_load(self):
"probability_fields",
}:
cube = iris.load(
tests.get_data_path(("NIMROD", "uk2km", "cutouts", datafile,))
tests.get_data_path(("NIMROD", "uk2km", "cutouts", datafile))
)
self.assertCML(cube, ("nimrod", f"{datafile}.cml"))

@@ -87,7 +87,7 @@ def test_load_kwarg(self):
"Ellipsoid not supported, proj_biaxial_ellipsoid:-32767, horizontal_grid_type:0",
):
with open(
tests.get_data_path(("NIMROD", "uk2km", "cutouts", datafile,)),
tests.get_data_path(("NIMROD", "uk2km", "cutouts", datafile)),
"rb",
) as infile:
iris.fileformats.nimrod_load_rules.run(
4 changes: 1 addition & 3 deletions lib/iris/tests/test_pp_cf.py
@@ -87,9 +87,7 @@ def _test_file(self, name):
else:
fname_name = name

self.assertCML(
cubes, self._ref_dir + ("from_pp", fname_name + ".cml",)
)
self.assertCML(cubes, self._ref_dir + ("from_pp", fname_name + ".cml"))

# 2) Save the Cube and check the netCDF
nc_filenames = []
@@ -46,8 +46,10 @@ def extract_grid(self, cube):
def check_mdtol(self, mdtol=None):
src_grid, target_grid = self.grids()
# Get _regrid_info result
_regrid_info = eregrid._regrid_area_weighted_rectilinear_src_and_grid__prepare(
src_grid, target_grid
_regrid_info = (
eregrid._regrid_area_weighted_rectilinear_src_and_grid__prepare(
src_grid, target_grid
)
)
self.assertEqual(len(_regrid_info), 9)
with mock.patch(