diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ac83b6178b..100eae0ac8 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -61,3 +61,10 @@ repos: hooks: - id: sort-all types: [file, python] + +- repo: https://github.com/numpy/numpydoc + rev: v1.6.0 + hooks: + - id: numpydoc-validation + exclude: "^lib/iris/tests/|docs/gallery_code/" + types: [file, python] diff --git a/benchmarks/asv_delegated_conda.py b/benchmarks/asv_delegated_conda.py index c8070b063a..7d8b6e109c 100644 --- a/benchmarks/asv_delegated_conda.py +++ b/benchmarks/asv_delegated_conda.py @@ -47,17 +47,15 @@ def __init__( Parameters ---------- - conf : Config instance - + conf : Config + Config instance. python : str Version of Python. Must be of the form "MAJOR.MINOR". - requirements : dict Dictionary mapping a PyPI package name to a version identifier string. - tagged_env_vars : dict - Environment variables, tagged for build vs. non-build + Environment variables, tagged for build vs. non-build. """ ignored = ["`python`"] diff --git a/benchmarks/benchmarks/__init__.py b/benchmarks/benchmarks/__init__.py index 87a77fa5a4..e969b1f23e 100644 --- a/benchmarks/benchmarks/__init__.py +++ b/benchmarks/benchmarks/__init__.py @@ -10,11 +10,12 @@ def disable_repeat_between_setup(benchmark_object): - """Benchmarks where object persistence would be inappropriate (decorator). + """Benchmark where object persistence would be inappropriate (decorator). E.g: - * Benchmarking data realisation - * Benchmarking Cube coord addition + + * Benchmarking data realisation + * Benchmarking Cube coord addition Can be applied to benchmark classes/methods/functions. @@ -107,14 +108,15 @@ def _wrapper(*args, **kwargs): def on_demand_benchmark(benchmark_object): - """Disables these benchmark(s) unless ON_DEMAND_BENCHARKS env var is set. + """Disable these benchmark(s) unless ON_DEMAND_BENCHARKS env var is set. This is a decorator. For benchmarks that, for whatever reason, should not be run by default. E.g: - * Require a local file - * Used for scalability analysis instead of commit monitoring. + + * Require a local file + * Used for scalability analysis instead of commit monitoring. Can be applied to benchmark classes/methods/functions. diff --git a/benchmarks/benchmarks/experimental/ugrid/regions_combine.py b/benchmarks/benchmarks/experimental/ugrid/regions_combine.py index 10711d0349..f589620aea 100644 --- a/benchmarks/benchmarks/experimental/ugrid/regions_combine.py +++ b/benchmarks/benchmarks/experimental/ugrid/regions_combine.py @@ -92,7 +92,7 @@ def setup_cache(self): ) def setup(self, n_cubesphere, imaginary_data=True, create_result_cube=True): - """Combine-tests "standard" setup operation. + """Combine tests "standard" setup operation. Load the source cubes (full-mesh + region) from disk. These are specific to the cubesize parameter. diff --git a/benchmarks/benchmarks/import_iris.py b/benchmarks/benchmarks/import_iris.py index 566ffca78b..ff5f19e421 100644 --- a/benchmarks/benchmarks/import_iris.py +++ b/benchmarks/benchmarks/import_iris.py @@ -3,7 +3,7 @@ # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
-"""import iris benchmarking.""" +"""Import iris benchmarking.""" from importlib import import_module, reload diff --git a/benchmarks/benchmarks/load/__init__.py b/benchmarks/benchmarks/load/__init__.py index 8a7aa182d3..c977e924af 100644 --- a/benchmarks/benchmarks/load/__init__.py +++ b/benchmarks/benchmarks/load/__init__.py @@ -112,7 +112,7 @@ class ManyVars: @staticmethod def _create_file(save_path: str) -> None: - """Is run externally - everything must be self-contained.""" + """Run externally - everything must be self-contained.""" import numpy as np from iris import save diff --git a/benchmarks/benchmarks/metadata_manager_factory.py b/benchmarks/benchmarks/metadata_manager_factory.py index 01a2b661b8..cd50a767a1 100644 --- a/benchmarks/benchmarks/metadata_manager_factory.py +++ b/benchmarks/benchmarks/metadata_manager_factory.py @@ -2,7 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""metadata_manager_factory benchmark tests.""" +"""Metadata manager factory benchmark tests.""" from iris.common import ( AncillaryVariableMetadata, diff --git a/benchmarks/benchmarks/sperf/combine_regions.py b/benchmarks/benchmarks/sperf/combine_regions.py index 7d677ed74f..d5572c58ea 100644 --- a/benchmarks/benchmarks/sperf/combine_regions.py +++ b/benchmarks/benchmarks/sperf/combine_regions.py @@ -83,7 +83,7 @@ def setup_cache(self): ) def setup(self, n_cubesphere, imaginary_data=True, create_result_cube=True): - """Combine-tests "standard" setup operation. + """Combine tests "standard" setup operation. Load the source cubes (full-mesh + region) from disk. These are specific to the cubesize parameter. diff --git a/benchmarks/bm_runner.py b/benchmarks/bm_runner.py index 10dc5f469a..3511eb1083 100644 --- a/benchmarks/bm_runner.py +++ b/benchmarks/bm_runner.py @@ -66,7 +66,7 @@ def _check_requirements(package: str) -> None: def _prep_data_gen_env() -> None: - """Create/access a separate, unchanging environment for generating test data.""" + """Create or access a separate, unchanging environment for generating test data.""" python_version = "3.11" data_gen_var = "DATA_GEN_PYTHON" if data_gen_var in environ: diff --git a/docs/gallery_code/general/plot_custom_aggregation.py b/docs/gallery_code/general/plot_custom_aggregation.py index 540f785ed6..65fadfb473 100644 --- a/docs/gallery_code/general/plot_custom_aggregation.py +++ b/docs/gallery_code/general/plot_custom_aggregation.py @@ -39,14 +39,14 @@ def count_spells(data, threshold, axis, spell_length): Parameters ---------- data : array - raw data to be compared with value threshold. + Raw data to be compared with value threshold. threshold : float - threshold point for 'significant' datapoints. + Threshold point for 'significant' datapoints. axis : int - number of the array dimension mapping the time sequences. - (Can also be negative, e.g. '-1' means last dimension) + Number of the array dimension mapping the time sequences. + (Can also be negative, e.g. '-1' means last dimension). spell_length : int - number of consecutive times at which value > threshold to "count". + Number of consecutive times at which value > threshold to "count". 
""" if axis < 0: diff --git a/docs/gallery_code/general/plot_rotated_pole_mapping.py b/docs/gallery_code/general/plot_rotated_pole_mapping.py index 60b187ee56..e9e3656184 100644 --- a/docs/gallery_code/general/plot_rotated_pole_mapping.py +++ b/docs/gallery_code/general/plot_rotated_pole_mapping.py @@ -1,6 +1,6 @@ """ Rotated Pole Mapping -===================== +==================== This example uses several visualisation methods to achieve an array of differing images, including: diff --git a/docs/src/conf.py b/docs/src/conf.py index 89133d0e1b..f5d0d37c21 100644 --- a/docs/src/conf.py +++ b/docs/src/conf.py @@ -17,7 +17,7 @@ # serve to show the default. # ---------------------------------------------------------------------------- -"""sphinx config.""" +"""Config for sphinx.""" import datetime from importlib.metadata import version as get_version diff --git a/docs/src/developers_guide/documenting/docstrings_attribute.py b/docs/src/developers_guide/documenting/docstrings_attribute.py index 9b85ecb201..1714373a62 100644 --- a/docs/src/developers_guide/documenting/docstrings_attribute.py +++ b/docs/src/developers_guide/documenting/docstrings_attribute.py @@ -1,4 +1,4 @@ -"""docstring attribute example.""" +"""Docstring attribute example.""" class ExampleClass: diff --git a/docs/src/developers_guide/documenting/docstrings_sample_routine.py b/docs/src/developers_guide/documenting/docstrings_sample_routine.py index 4c26bc3569..7feec6dbd0 100644 --- a/docs/src/developers_guide/documenting/docstrings_sample_routine.py +++ b/docs/src/developers_guide/documenting/docstrings_sample_routine.py @@ -1,4 +1,4 @@ -"""docstring routine example.""" +"""Docstring routine example.""" def sample_routine(arg1, arg2, kwarg1="foo", kwarg2=None): @@ -12,16 +12,16 @@ def sample_routine(arg1, arg2, kwarg1="foo", kwarg2=None): First argument description. arg2 : numpy.ndarray Second argument description. - kwarg1: str, optional + kwarg1 : str, optional The first keyword argument. This argument description can be multi-lined. - kwarg2 : bool, optional + **kwarg2 : bool, optional The second keyword argument. Returns ------- numpy.ndarray - numpy.ndarray of arg1 * arg2 + A numpy.ndarray of arg1 * arg2. """ pass diff --git a/docs/src/further_topics/filtering_warnings.rst b/docs/src/further_topics/filtering_warnings.rst index f39e6153f2..5175475922 100644 --- a/docs/src/further_topics/filtering_warnings.rst +++ b/docs/src/further_topics/filtering_warnings.rst @@ -47,9 +47,9 @@ Warnings: >>> my_operation() ... - iris/coord_systems.py:442: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. + iris/coord_systems.py:444: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. warnings.warn(wmsg, category=iris.warnings.IrisUserWarning) - iris/coord_systems.py:768: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. + iris/coord_systems.py:770: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. warnings.warn( Warnings can be suppressed using the Python warnings filter with the ``ignore`` @@ -110,7 +110,7 @@ You can target specific Warning messages, e.g. ... warnings.filterwarnings("ignore", message="Discarding false_easting") ... my_operation() ... 
- iris/coord_systems.py:442: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. + iris/coord_systems.py:444: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. warnings.warn(wmsg, category=iris.warnings.IrisUserWarning) :: @@ -125,16 +125,16 @@ Or you can target Warnings raised by specific lines of specific modules, e.g. .. doctest:: filtering_warnings >>> with warnings.catch_warnings(): - ... warnings.filterwarnings("ignore", module="iris.coord_systems", lineno=442) + ... warnings.filterwarnings("ignore", module="iris.coord_systems", lineno=444) ... my_operation() ... - iris/coord_systems.py:768: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. + iris/coord_systems.py:770: IrisDefaultingWarning: Discarding false_easting and false_northing that are not used by Cartopy. warnings.warn( :: - python -W ignore:::iris.coord_systems:442 - export PYTHONWARNINGS=ignore:::iris.coord_systems:442 + python -W ignore:::iris.coord_systems:444 + export PYTHONWARNINGS=ignore:::iris.coord_systems:444 Warnings from a Common Source ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -188,7 +188,7 @@ module during execution: ... ) ... my_operation() ... - iris/coord_systems.py:442: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. + iris/coord_systems.py:444: IrisUserWarning: Setting inverse_flattening does not affect other properties of the GeogCS object. To change other properties set them explicitly or create a new GeogCS instance. warnings.warn(wmsg, category=iris.warnings.IrisUserWarning) ---- diff --git a/lib/iris/_concatenate.py b/lib/iris/_concatenate.py index 7011df0924..8d929c2af2 100644 --- a/lib/iris/_concatenate.py +++ b/lib/iris/_concatenate.py @@ -160,7 +160,7 @@ class _DerivedCoordAndDims( Parameters ---------- coord : :class:`iris.coord.DimCoord` or :class:`iris.coord.AuxCoord` - dims: tuple + dims : tuple A tuple of the data dimension(s) spanned by the coordinate. aux_factory : :class:`iris.aux_factory.AuxCoordFactory` @@ -196,7 +196,7 @@ def __new__(cls, ancil, dims): Parameters ---------- - ancil : :class:`iris.coord.CellMeasure` or :class:`iris.coord.AncillaryVariable`. + ancil : :class:`iris.coord.CellMeasure` or :class:`iris.coord.AncillaryVariable` dims : The dimension(s) associated with ancil. @@ -271,7 +271,6 @@ class _CoordExtent(namedtuple("CoordExtent", ["points", "bounds"])): ---------- points : :class:`_Extent` The :class:`_Extent` of the coordinate point values. - bounds : A list containing the :class:`_Extent` of the coordinate lower bound and the upper bound. Defaults to None if no associated @@ -297,7 +296,7 @@ def concatenate( cubes : iterable of :class:`iris.cube.Cube` An iterable containing one or more :class:`iris.cube.Cube` instances to be concatenated together. - error_on_mismatch: bool, default=False + error_on_mismatch : bool, default=False If True, raise an informative :class:`~iris.exceptions.ContatenateError` if registration fails. check_aux_coords : bool, default=True @@ -491,7 +490,7 @@ def _coordinate_differences(self, other, attr, reason="metadata"): between `self` and `other`. 
reason : str, default="metadata" The reason to give for mismatch (function is normally, but not - always, testing metadata) + always, testing metadata). Returns ------- @@ -763,9 +762,9 @@ def axis(self): return self._axis def concatenate(self): - """Concatenates all the source-cubes registered with the :class:`_ProtoCube`. + """Concatenate all the source-cubes registered with the :class:`_ProtoCube`. - Concatenates all the source-cubes registered with the + Concatenate all the source-cubes registered with the :class:`_ProtoCube` over the nominated common dimension. Returns diff --git a/lib/iris/_constraints.py b/lib/iris/_constraints.py index 4c993885a8..b8f4665b46 100644 --- a/lib/iris/_constraints.py +++ b/lib/iris/_constraints.py @@ -282,9 +282,9 @@ def __init__(self, coord_name, coord_thing): Parameters ---------- coord_name : str - The name of the coordinate to constrain + The name of the coordinate to constrain. coord_thing : - The object to compare + The object to compare. """ self.coord_name = coord_name @@ -492,9 +492,9 @@ def list_of_constraints(constraints): def as_constraint(thing): - """Casts an object into a cube constraint where possible. + """Cast an object into a cube constraint where possible. - Casts an object into a cube constraint where possible, otherwise + Cast an object into a cube constraint where possible, otherwise a TypeError will be raised. If the given object is already a valid constraint then the given object diff --git a/lib/iris/_data_manager.py b/lib/iris/_data_manager.py index 15dfbd0030..dbd122ba04 100644 --- a/lib/iris/_data_manager.py +++ b/lib/iris/_data_manager.py @@ -171,7 +171,7 @@ def _deepcopy(self, memo, data=None): @property def data(self): - """Returns the real data. Any lazy data being managed will be realised. + """Return the real data. Any lazy data being managed will be realised. Returns ------- diff --git a/lib/iris/_lazy_data.py b/lib/iris/_lazy_data.py index 36c0825ad8..7791b654a1 100644 --- a/lib/iris/_lazy_data.py +++ b/lib/iris/_lazy_data.py @@ -338,7 +338,7 @@ def as_concrete_data(data): Parameters ---------- data : - A dask array, NumPy `ndarray` or masked array + A dask array, NumPy `ndarray` or masked array. Returns ------- diff --git a/lib/iris/_merge.py b/lib/iris/_merge.py index 85012c0ef8..8d1a0f052a 100644 --- a/lib/iris/_merge.py +++ b/lib/iris/_merge.py @@ -877,7 +877,7 @@ def _build_separable_group( dependency on any other candidate dimensions within the space. group : A set of related (chained) inseparable candidate dimensions. - separable_consistent_groups: + separable_consistent_groups : A list of candidate dimension groups that are consistently separable. positions : A list containing a dictionary of candidate dimension key to @@ -1047,7 +1047,7 @@ def derive_space(groups, relation_matrix, positions, function_matrix=None): ---------- groups : A list of all related (chained) inseparable candidate dimensions. - relation_matrix: + relation_matrix : The relation dictionary for each candidate dimension. positions : A list containing a dictionary of candidate dimension key to @@ -1294,7 +1294,7 @@ def register(self, cube, error_on_mismatch=False): cube : Candidate :class:`iris.cube.Cube` to be associated with this :class:`ProtoCube`. - error_on_mismatch :bool, default=False + error_on_mismatch : bool, default=False If True, raise an informative :class:`~iris.exceptions.MergeError` if registration fails. @@ -1335,7 +1335,8 @@ def _guess_axis(self, name): Returns ------- - axis : {'T', 'Z', 'Y', 'X'} or None. 
+ str or None + {'T', 'Z', 'Y', 'X'} or None. """ axis = None diff --git a/lib/iris/_shapefiles.py b/lib/iris/_shapefiles.py index 351e798ae5..74b24b6627 100644 --- a/lib/iris/_shapefiles.py +++ b/lib/iris/_shapefiles.py @@ -35,20 +35,20 @@ def create_shapefile_mask( ---------- geometry : :class:`shapely.Geometry` cube : :class:`iris.cube.Cube` - A :class:`~iris.cube.Cube` which has 1d x and y coordinates + A :class:`~iris.cube.Cube` which has 1d x and y coordinates. minimum_weight : float, default 0.0 A float between 0 and 1 determining what % of a cell a shape must cover for the cell to remain unmasked. eg: 0.1 means that at least 10% of the shape overlaps the cell to be unmasked. Requires geometry to be a Polygon or MultiPolygon - Defaults to 0.0 (eg only test intersection) + Defaults to 0.0 (eg only test intersection). Returns ------- :class:`np.array` An array of the shape of the x & y coordinates of the cube, with points - to mask equal to True + to mask equal to True. """ from iris.cube import Cube, CubeList @@ -121,19 +121,19 @@ def _transform_coord_system(geometry, cube, geometry_system=None): Parameters ---------- - geometry: :class:`shapely.Geometry` - cube: :class:`iris.cube.Cube` + geometry : :class:`shapely.Geometry` + cube : :class:`iris.cube.Cube` :class:`~iris.cube.Cube` with the coord_system to be projected to and - a x coordinate - geometry_system: :class:`iris.coord_systems`, optional + a x coordinate. + geometry_system : :class:`iris.coord_systems`, optional A :class:`~iris.coord_systems` object describing the coord_system of the shapefile. Defaults to None, - which is treated as GeogCS + which is treated as GeogCS. Returns ------- :class:`shapely.Geometry` - A transformed copy of the provided :class:`shapely.Geometry` + A transformed copy of the provided :class:`shapely.Geometry`. """ y_name, x_name = _cube_primary_xy_coord_names(cube) @@ -196,7 +196,7 @@ def _cube_primary_xy_coord_names(cube): Returns ------- tuple of str - The names of the primary latitude and longitude coordinates + The names of the primary latitude and longitude coordinates. """ latc = ( @@ -225,12 +225,12 @@ def _get_mod_rebased_coord_bounds(coord): Parameters ---------- coord : :class:`iris.coords.Coord` - An Iris coordinate with a modulus + An Iris coordinate with a modulus. Returns ------- :class:`np.array` - A 1d Numpy array of [start,end] pairs for bounds of the coord + A 1d Numpy array of [start,end] pairs for bounds of the coord. """ modulus = coord.units.modulus diff --git a/lib/iris/analysis/__init__.py b/lib/iris/analysis/__init__.py index 773e804a14..7972282201 100644 --- a/lib/iris/analysis/__init__.py +++ b/lib/iris/analysis/__init__.py @@ -203,7 +203,7 @@ def _dimensional_metadata_comparison(*cubes, object_get=None): Convenience function to help compare coordinates, cell-measures or ancillary-variables, on one or more cubes, by their metadata. - .. Note:: + .. note:: Up to Iris 2.x, this _used_ to be the public API method "iris.analysis.coord_comparison". @@ -217,70 +217,72 @@ def _dimensional_metadata_comparison(*cubes, object_get=None): Parameters ---------- cubes : iterable of `iris.cube.Cube` - a set of cubes whose coordinates, cell-measures or ancillary-variables are to + A set of cubes whose coordinates, cell-measures or ancillary-variables are to be compared. object_get : callable(cube) or None, optional If not None, this must be a cube method returning a list of all cube elements of the required type, i.e. 
one of `iris.cube.Cube.coords`, `iris.cube.Cube.cell_measures`, or `iris.cube.Cube.ancillary_variables`. - If not specified, defaults to `iris.cube.Cube.coords` + If not specified, defaults to `iris.cube.Cube.coords`. Returns ------- - result : dict mapping str, list of _CoordGroup + (dict mapping str, list of _CoordGroup) A dictionary whose keys are match categories and values are groups of coordinates, cell-measures or ancillary-variables. - The values of the returned dictionary are lists of _CoordGroup representing - grouped coordinates. Each _CoordGroup contains all the input 'cubes', and a - matching list of the coord within each cube that matches some specific CoordDefn - (or maybe None). - - The keys of the returned dictionary are strings naming 'categories' : Each - represents a statement, - "Given these cubes list the coordinates which, - when grouped by metadata, are/have..." - - Returned Keys: - - * **grouped_coords**. - A list of coordinate groups of all the coordinates grouped together - by their coordinate definition - * **ungroupable**. - A list of coordinate groups which contain at least one None, - meaning not all Cubes provide an equivalent coordinate - * **not_equal**. - A list of coordinate groups of which not all are equal - (superset of ungroupable) - * **no_data_dimension**> - A list of coordinate groups of which all have no data dimensions on - their respective cubes - * **scalar**> - A list of coordinate groups of which all have shape (1, ) - * **non_equal_data_dimension**. - A list of coordinate groups of which not all have the same - data dimension on their respective cubes - * **non_equal_shape**. - A list of coordinate groups of which not all have the same shape - * **equal_data_dimension**. - A list of coordinate groups of which all have the same data dimension - on their respective cubes - * **equal**. - A list of coordinate groups of which all are equal - * **ungroupable_and_dimensioned**. - A list of coordinate groups of which not all cubes had an equivalent - (in metadata) coordinate which also describe a data dimension - * **dimensioned**. - A list of coordinate groups of which all describe a data dimension on - their respective cubes - * **ignorable**. - A list of scalar, ungroupable non_equal coordinate groups - * **resamplable**. - A list of equal, different data dimensioned coordinate groups - * **transposable**. - A list of non equal, same data dimensioned, non scalar coordinate groups - - Example usage:: + The values of the returned dictionary are lists of _CoordGroup representing + grouped coordinates. Each _CoordGroup contains all the input 'cubes', and a + matching list of the coord within each cube that matches some specific CoordDefn + (or maybe None). + + The keys of the returned dictionary are strings naming 'categories' : Each + represents a statement, + "Given these cubes list the coordinates which, + when grouped by metadata, are/have..." + + Returned Keys: + + * **grouped_coords**. + A list of coordinate groups of all the coordinates grouped together + by their coordinate definition + * **ungroupable**. + A list of coordinate groups which contain at least one None, + meaning not all Cubes provide an equivalent coordinate + * **not_equal**. 
+ A list of coordinate groups of which not all are equal + (superset of ungroupable) + * **no_data_dimension**> + A list of coordinate groups of which all have no data dimensions on + their respective cubes + * **scalar**> + A list of coordinate groups of which all have shape (1, ) + * **non_equal_data_dimension**. + A list of coordinate groups of which not all have the same + data dimension on their respective cubes + * **non_equal_shape**. + A list of coordinate groups of which not all have the same shape + * **equal_data_dimension**. + A list of coordinate groups of which all have the same data dimension + on their respective cubes + * **equal**. + A list of coordinate groups of which all are equal + * **ungroupable_and_dimensioned**. + A list of coordinate groups of which not all cubes had an equivalent + (in metadata) coordinate which also describe a data dimension + * **dimensioned**. + A list of coordinate groups of which all describe a data dimension on + their respective cubes + * **ignorable**. + A list of scalar, ungroupable non_equal coordinate groups + * **resamplable**. + A list of equal, different data dimensioned coordinate groups + * **transposable**. + A list of non equal, same data dimensioned, non scalar coordinate groups + + Examples + -------- + :: result = _dimensional_metadata_comparison(cube1, cube2) print('All equal coordinates: ', result['equal']) @@ -607,14 +609,16 @@ def update_metadata(self, cube, coords, **kwargs): else: # new style (preferred) cube.units = self.units_func(cube.units, **kwargs) - def post_process(self, collapsed_cube, data_result, coords, **kwargs): + def post_process( + self, collapsed_cube, data_result, coords, **kwargs + ): # numpydoc ignore=SS05 """Process the result from :func:`iris.analysis.Aggregator.aggregate`. Parameters ---------- - collapsed_cube: :class:`iris.cube.Cube`. + collapsed_cube : :class:`iris.cube.Cube` data_result : - Result from :func:`iris.analysis.Aggregator.aggregate` + Result from :func:`iris.analysis.Aggregator.aggregate`. coords : The one or more coordinates that were aggregated over. **kwargs : dict, optional @@ -779,14 +783,16 @@ def lazy_aggregate(self, data, axis, **kwargs): """ return self._base_aggregate(data, axis, lazy=True, **kwargs) - def post_process(self, collapsed_cube, data_result, coords, **kwargs): + def post_process( + self, collapsed_cube, data_result, coords, **kwargs + ): # numpydoc ignore=SS05 """Process the result from :func:`iris.analysis.Aggregator.aggregate`. Parameters ---------- collapsed_cube : :class:`iris.cube.Cube` data_result : - Result from :func:`iris.analysis.Aggregator.aggregate` + Result from :func:`iris.analysis.Aggregator.aggregate`. coords : The one or more coordinates that were aggregated over. **kwargs : dict, optional @@ -945,7 +951,9 @@ def __init__(self, units_func=None, lazy_func=None, **kwargs): #: A list of keywords associated with weighted behaviour. self._weighting_keywords = ["returned", "weights"] - def post_process(self, collapsed_cube, data_result, coords, **kwargs): + def post_process( + self, collapsed_cube, data_result, coords, **kwargs + ): # numpydoc ignore=SS05 """Process the result from :func:`iris.analysis.Aggregator.aggregate`. 
Returns a tuple(cube, weights) if a tuple(data, weights) was returned @@ -955,7 +963,7 @@ def post_process(self, collapsed_cube, data_result, coords, **kwargs): ---------- collapsed_cube : :class:`iris.cube.Cube` data_result : - Result from :func:`iris.analysis.Aggregator.aggregate` + Result from :func:`iris.analysis.Aggregator.aggregate`. coords : The one or more coordinates that were aggregated over. **kwargs : dict, optional @@ -1101,7 +1109,9 @@ def uses_weighting(self, **kwargs): break return result - def post_process(self, collapsed_cube, data_result, coords, **kwargs): + def post_process( + self, collapsed_cube, data_result, coords, **kwargs + ): # numpydoc ignore=SS05 """Process the result from :func:`iris.analysis.Aggregator.aggregate`. Returns a tuple(cube, weights) if a tuple(data, weights) was returned @@ -1111,7 +1121,7 @@ def post_process(self, collapsed_cube, data_result, coords, **kwargs): ---------- collapsed_cube : :class:`iris.cube.Cube` data_result : - Result from :func:`iris.analysis.Aggregator.aggregate` + Result from :func:`iris.analysis.Aggregator.aggregate`. coords : The one or more coordinates that were aggregated over. **kwargs : dict, optional @@ -1363,14 +1373,14 @@ def _percentile(data, percent, fast_percentile_method=False, **kwargs): Parameters ---------- - dataM : array-like - array from which percentiles are to be calculated - fast_percentile_method: bool, optional + data : array-like + Array from which percentiles are to be calculated. + fast_percentile_method : bool, optional When set to True, uses the numpy.percentiles method as a faster alternative to the scipy.mstats.mquantiles method. Does not handle masked arrays. **kwargs : dict, optional - passed to scipy.stats.mstats.mquantiles if fast_percentile_method is + Passed to scipy.stats.mstats.mquantiles if fast_percentile_method is False. Otherwise passed to numpy.percentile. """ @@ -1406,20 +1416,20 @@ def _weighted_quantile_1D(data, weights, quantiles, **kwargs): Parameters ---------- data : array - One dimensional data array - weights: array + One dimensional data array. + weights : array Array of the same size of `data`. If data is masked, weights must have matching mask. quantiles : float or sequence of floats Quantile(s) to compute. Must have a value between 0 and 1. **kwargs : dict, optional - passed to `scipy.interpolate.interp1d` + Passed to `scipy.interpolate.interp1d`. Returns ------- array or float. Calculated quantile values (set to np.nan wherever sum - of weights is zero or masked) + of weights is zero or masked). """ # Return np.nan if no usable points found if np.isclose(weights.sum(), 0.0) or ma.is_masked(weights.sum()): @@ -1457,9 +1467,9 @@ def _weighted_percentile(data, axis, weights, percent, returned=False, **kwargs) ---------- data : ndarray or masked array axis : int - axis to calculate percentiles over + Axis to calculate percentiles over. weights : ndarray - array with the weights. Must have same shape as data + Array with the weights. Must have same shape as data. percent : float or sequence of floats Percentile rank/s at which to extract value/s. 
returned : bool, default=False @@ -2641,7 +2651,7 @@ def interpolator(self, cube, coords): Returns ------- A callable with the interface: ``callable(sample_points, collapse_scalar=True)`` - where `sample_points` is a sequence containing an array of values + Where `sample_points` is a sequence containing an array of values for each of the coordinates passed to this method, and ``collapse_scalar`` determines whether to remove length one dimensions in the result cube caused by scalar values in @@ -2690,7 +2700,7 @@ def regridder(self, src_grid, target_grid): Returns ------- A callable with the interface ``callable(cube)`` - where `cube` is a cube with the same grid as ``src_grid`` + Where `cube` is a cube with the same grid as ``src_grid`` that is to be regridded to the ``target_grid``. """ @@ -2770,7 +2780,7 @@ def regridder(self, src_grid_cube, target_grid_cube): Returns ------- A callable with the interface `callable(cube)` - where `cube` is a cube with the same grid as `src_grid_cube` + Where `cube` is a cube with the same grid as `src_grid_cube` that is to be regridded to the grid of `target_grid_cube`. """ @@ -2917,7 +2927,7 @@ class UnstructuredNearest: must be. Otherwise, the corresponding X and Y coordinates must have the same units in the source and grid cubes. - .. Note:: + .. note:: Currently only supports regridding, not interpolation. """ @@ -2972,7 +2982,7 @@ def regridder(self, src_cube, target_grid): Returns ------- A callable with the interface `callable(cube)` - where `cube` is a cube with the same grid as `src_cube` + Where `cube` is a cube with the same grid as `src_cube` that is to be regridded to the `target_grid`. """ diff --git a/lib/iris/analysis/_area_weighted.py b/lib/iris/analysis/_area_weighted.py index 263f83838c..9a63a49457 100644 --- a/lib/iris/analysis/_area_weighted.py +++ b/lib/iris/analysis/_area_weighted.py @@ -39,7 +39,7 @@ def __init__(self, src_grid_cube, target_grid_cube, mdtol=1): Notes ----- - .. Note:: + .. note:: Both source and target cubes must have an XY grid defined by separate X and Y dimensions with dimension coordinates. diff --git a/lib/iris/analysis/_grid_angles.py b/lib/iris/analysis/_grid_angles.py index 8712dd9ad1..80b73d81d7 100644 --- a/lib/iris/analysis/_grid_angles.py +++ b/lib/iris/analysis/_grid_angles.py @@ -21,13 +21,13 @@ def _3d_xyz_from_latlon(lon, lat): Parameters ---------- - lon, lat: float array + lon, lat : float array Arrays of longitudes and latitudes, in degrees. Both the same shape. Returns ------- - xyz : array, dtype=float64 + array of dtype=float64 Cartesian coordinates on a unit sphere. Shape is (3, ). The x / y / z coordinates are in xyz[0 / 1 / 2]. @@ -53,12 +53,12 @@ def _latlon_from_xyz(xyz): xyz : array Array of 3-D cartesian coordinates. Shape (3, ). - x / y / z values are in xyz[0 / 1 / 2], + x / y / z values are in xyz[0 / 1 / 2]. Returns ------- - lonlat : array - longitude and latitude position angles, in degrees. + np.array + Longitude and latitude position angles, in degrees. Shape (2, ). The longitudes / latitudes are in lonlat[0 / 1]. @@ -114,7 +114,7 @@ def _angle(p, q, r): Returns ------- - angle : float array + float array Grid angles relative to true-East, in degrees. Positive when grid-East is anticlockwise from true-East. Shape is same as . @@ -156,7 +156,7 @@ def gridcell_angles(x, y=None, cell_angle_boundpoints="mid-lhs, mid-rhs"): Parameters ---------- x : :class:`~iris.cube.Cube` - a grid cube with 2D X and Y coordinates, identified by 'axis'. 
+ A grid cube with 2D X and Y coordinates, identified by 'axis'. The coordinates must be 2-dimensional with the same shape. The two dimensions represent grid dimensions in the order Y, then X. x, y : :class:`~iris.coords.Coord` @@ -166,10 +166,10 @@ def gridcell_angles(x, y=None, cell_angle_boundpoints="mid-lhs, mid-rhs"): If there is no coordinate system, they are assumed to be true longitudes and latitudes. Units must convertible to 'degrees'. x, y : 2-dimensional arrays of same shape (ny, nx) - longitude and latitude cell center locations, in degrees. + Longitude and latitude cell center locations, in degrees. The two dimensions represent grid dimensions in the order Y, then X. x, y : 3-dimensional arrays of same shape (ny, nx, 4) - longitude and latitude cell bounds, in degrees. + Longitude and latitude cell bounds, in degrees. The first two dimensions are grid dimensions in the order Y, then X. The last index maps cell corners anticlockwise from bottom-left. cell_angle_boundpoints : str, default="mid-lhs, mid-rhs" @@ -182,7 +182,7 @@ def gridcell_angles(x, y=None, cell_angle_boundpoints="mid-lhs, mid-rhs"): Returns ------- - angles : 2-dimensional cube + 2-dimensional cube Cube of angles of grid-x vector from true Eastward direction for each gridcell, in degrees. It also has "true" longitude and latitude coordinates, with no @@ -412,7 +412,7 @@ def rotate_grid_vectors(u_cube, v_cube, grid_angles_cube=None, grid_angles_kwarg Cubes of grid-u and grid-v vector components. Units should be differentials of true-distance, e.g. 'm/s'. grid_angles_cube : cube, optional - gridcell orientation angles. + Gridcell orientation angles. Units must be angular, i.e. can be converted to 'radians'. If not provided, grid angles are estimated from 'u_cube' using the :func:`gridcell_angles` method. @@ -422,20 +422,16 @@ def rotate_grid_vectors(u_cube, v_cube, grid_angles_cube=None, grid_angles_kwarg Returns ------- - true_u, true_v : cube - Cubes of true-north oriented vector components. + (cube, cube) + Tuple of cubes of true-north oriented vector components. Units are same as inputs. Notes ----- - .. note:: - - Vector magnitudes will always be the same as the inputs. - - .. note:: + Vector magnitudes will always be the same as the inputs. - This function does not maintain laziness when called; it realises data. - See more at :doc:`/userguide/real_and_lazy_data`. + This function does not maintain laziness when called; it realises data. + See more at :doc:`/userguide/real_and_lazy_data`. """ u_out, v_out = (cube.copy() for cube in (u_cube, v_cube)) diff --git a/lib/iris/analysis/_interpolation.py b/lib/iris/analysis/_interpolation.py index babc414ee4..6904c5ae4f 100644 --- a/lib/iris/analysis/_interpolation.py +++ b/lib/iris/analysis/_interpolation.py @@ -180,7 +180,7 @@ def __init__(self, src_cube, coords, method, extrapolation_mode): The :class:`iris.cube.Cube` which is to be interpolated. coords : The names or coordinate instances which are to be - interpolated over + interpolated over. method : Either 'linear' or 'nearest'. extrapolation_mode : str @@ -477,7 +477,7 @@ def _points(self, sample_points, data, data_dims=None): sample_points : A list of N iterables, where N is the number of coordinates passed to the constructor. - [sample_values_for_coord_0, sample_values_for_coord_1, ...] + [sample_values_for_coord_0, sample_values_for_coord_1, ...]. data : The data to interpolate - not necessarily the data from the cube that was used to construct this interpolator. 
If the data has @@ -573,7 +573,7 @@ def __call__(self, sample_points, collapse_scalar=True): sample_points : A list of N iterables, where N is the number of coordinates passed to the constructor. - [sample_values_for_coord_0, sample_values_for_coord_1, ...] + [sample_values_for_coord_0, sample_values_for_coord_1, ...]. collapse_scalar : bool, default=True Whether to collapse the dimension of the scalar sample points in the resulting cube. Default is True. diff --git a/lib/iris/analysis/_regrid.py b/lib/iris/analysis/_regrid.py index b85265e5d9..42c6bad499 100644 --- a/lib/iris/analysis/_regrid.py +++ b/lib/iris/analysis/_regrid.py @@ -30,14 +30,17 @@ def _transform_xy_arrays(crs_from, x, y, crs_to): Parameters ---------- - crs_from, crs_to : :class:`cartopy.crs.Projection` + crs_from : :class:`cartopy.crs.Projection` The coordinate reference systems. x, y : arrays - point locations defined in 'crs_from'. + Point locations defined in 'crs_from'. + crs_to : :class:`cartopy.crs.Projection` + The coordinate reference systems. Returns ------- - x, y : Arrays of locations defined in 'crs_to'. + (array, array) + Arrays of locations defined in 'crs_to' of (x, y). """ pts = crs_to.transform_points(crs_from, x, y) @@ -636,7 +639,7 @@ def _regrid( A 2-dimensional array of sample X values. sample_grid_y : A 2-dimensional array of sample Y values. - method: str, default="linear" + method : str, default="linear" Either 'linear' or 'nearest'. The default method is 'linear'. extrapolation_mode : str, default="nanmask" Must be one of the following strings: diff --git a/lib/iris/analysis/calculus.py b/lib/iris/analysis/calculus.py index 6955e847dc..75b7db050b 100644 --- a/lib/iris/analysis/calculus.py +++ b/lib/iris/analysis/calculus.py @@ -134,7 +134,7 @@ def cube_delta(cube, coord): Parameters ---------- coord : - either a Coord instance or the unique name of a coordinate in the cube. + Either a Coord instance or the unique name of a coordinate in the cube. If a Coord instance is provided, it does not necessarily have to exist in the cube. @@ -423,7 +423,7 @@ def _coord_sin(coord): Parameters ---------- coord : - Coord instance with values in either degrees or radians + Coord instance with values in either degrees or radians. """ return _trig_method(coord, np.sin) @@ -435,7 +435,7 @@ def _coord_cos(coord): Parameters ---------- coord : - Coord instance with values in either degrees or radians + Coord instance with values in either degrees or radians. """ return _trig_method(coord, np.cos) @@ -447,9 +447,9 @@ def _trig_method(coord, trig_function): Parameters ---------- coord : - Coord instance with points values in either degrees or radians + Coord instance with points values in either degrees or radians. trig_function : - Reference to a trigonometric function e.g. numpy.sin + Reference to a trigonometric function e.g. numpy.sin. """ # If we are in degrees create a copy that is in radians. @@ -483,11 +483,11 @@ def curl(i_cube, j_cube, k_cube=None): Parameters ---------- i_cube : - The i cube of the vector to operate on + The i cube of the vector to operate on. j_cube : - The j cube of the vector to operate on + The j cube of the vector to operate on. k_cube : optional - The k cube of the vector to operate on + The k cube of the vector to operate on. 
Returns ------- diff --git a/lib/iris/analysis/cartography.py b/lib/iris/analysis/cartography.py index bd1958581f..854347839d 100644 --- a/lib/iris/analysis/cartography.py +++ b/lib/iris/analysis/cartography.py @@ -347,11 +347,11 @@ def _quadrant_area(radian_lat_bounds, radian_lon_bounds, radius_of_earth): Parameters ---------- radian_lat_bounds : - [n,2] array of latitude bounds (radians) + [n,2] array of latitude bounds (radians). radian_lon_bounds : - [n,2] array of longitude bounds (radians) + [n,2] array of longitude bounds (radians). radius_of_earth : - radius of the earth (currently assumed spherical) + Radius of the earth (currently assumed spherical). """ # ensure pairs of bounds @@ -894,10 +894,12 @@ def _transform_xy(crs_from, x, y, crs_to): Parameters ---------- - crs_from, crs_to : :class:`cartopy.crs.Projection` + crs_from : :class:`cartopy.crs.Projection` The coordinate reference systems. x, y : array - point locations defined in 'crs_from'. + Point locations defined in 'crs_from'. + crs_to : :class:`cartopy.crs.Projection` + The coordinate reference systems. Returns ------- @@ -916,10 +918,13 @@ def _inter_crs_differentials(crs1, x, y, crs2): Parameters ---------- - crs1, crs2 : :class:`cartopy.crs.Projection` - The coordinate systems, "from" and "to". + crs1 : :class:`cartopy.crs.Projection` + The coordinate systems for "from". x, y : array Point locations defined in 'crs1'. + crs2 : :class:`cartopy.crs.Projection` + The coordinate systems for "to". + Returns ------- @@ -1047,7 +1052,7 @@ def _transform_distance_vectors_tolerance_mask(src_crs, x, y, tgt_crs, ds, dx2, tgt_crs : `cartopy.crs.Projection` The target coordinate reference systems. ds : `DistanceDifferential` - Distance differentials for src_crs and tgt_crs at specified locations + Distance differentials for src_crs and tgt_crs at specified locations. dx2, dy2 : `PartialDifferential` Partial differentials from src_crs to tgt_crs. diff --git a/lib/iris/analysis/maths.py b/lib/iris/analysis/maths.py index caf4aea0a8..80d3ead90c 100644 --- a/lib/iris/analysis/maths.py +++ b/lib/iris/analysis/maths.py @@ -222,7 +222,7 @@ def add(cube, other, dim=None, in_place=False): ---------- cube : iris.cube.Cube First operand to add. - other: iris.cube.Cube, iris.coords.Coord, number, numpy.ndarray or dask.array.Array + other : iris.cube.Cube, iris.coords.Coord, number, numpy.ndarray or dask.array.Array Second operand to add. dim : int, optional If `other` is a coord which does not exist on the cube, specify the @@ -274,7 +274,7 @@ def subtract(cube, other, dim=None, in_place=False): ---------- cube : iris.cube.Cube Cube from which to subtract. - other: iris.cube.Cube, iris.coords.Coord, number, numpy.ndarray or dask.array.Array + other : iris.cube.Cube, iris.coords.Coord, number, numpy.ndarray or dask.array.Array Object to subtract from the cube. dim : int, optional If `other` is a coord which does not exist on the cube, specify the @@ -323,22 +323,22 @@ def _add_subtract_common( Parameters ---------- operation_function : - function which does the operation (e.g. numpy.subtract) + Function which does the operation (e.g. numpy.subtract). operation_name : - The public name of the operation (e.g. 'divide') + The public name of the operation (e.g. 'divide'). cube : - The cube whose data is used as the first argument to `operation_function` + The cube whose data is used as the first argument to `operation_function`. 
other : The cube, coord, ndarray, dask array or number whose - data is used as the second argument + data is used as the second argument. new_dtype : The expected dtype of the output. Used in the case of scalar - masked arrays + masked arrays. dim : optional Dimension along which to apply `other` if it's a coordinate that is not - found in `cube` + found in `cube`. in_place : bool, default=False - Whether or not to apply the operation in place to `cube` and `cube.data` + Whether or not to apply the operation in place to `cube` and `cube.data`. """ _assert_is_cube(cube) @@ -381,7 +381,7 @@ def multiply(cube, other, dim=None, in_place=False): ---------- cube : iris.cube.Cube First operand to multiply. - other: iris.cube.Cube, iris.coords.Coord, number, numpy.ndarray or dask.array.Array + other : iris.cube.Cube, iris.coords.Coord, number, numpy.ndarray or dask.array.Array Second operand to multiply. dim : int, optional If `other` is a coord which does not exist on the cube, specify the @@ -466,7 +466,7 @@ def divide(cube, other, dim=None, in_place=False): ---------- cube : iris.cube.Cube Numerator. - other: iris.cube.Cube, iris.coords.Coord, number, numpy.ndarray or dask.array.Array + other : iris.cube.Cube, iris.coords.Coord, number, numpy.ndarray or dask.array.Array Denominator. dim : int, optional If `other` is a coord which does not exist on the cube, specify the @@ -482,6 +482,7 @@ def divide(cube, other, dim=None, in_place=False): ----- This function maintains laziness when called; it does not realise data. See more at :doc:`/userguide/real_and_lazy_data`. + """ _assert_is_cube(cube) @@ -712,7 +713,7 @@ def apply_ufunc(ufunc, cube, other=None, new_unit=None, new_name=None, in_place= :func:`numpy.mod`. cube : An instance of :class:`iris.cube.Cube`. - other ::class:`iris.cube.Cube`, optional + other : :class:`iris.cube.Cube`, optional An instance of :class:`iris.cube.Cube` to be given as the second argument to :func:`numpy.ufunc`. new_unit : optional @@ -813,25 +814,25 @@ def _binary_op_common( Parameters ---------- operation_function : - Function which does the operation (e.g. numpy.divide) + Function which does the operation (e.g. numpy.divide). operation_name : - The public name of the operation (e.g. 'divide') + The public name of the operation (e.g. 'divide'). cube : - The cube whose data is used as the first argument to `operation_function` + The cube whose data is used as the first argument to `operation_function`. other : The cube, coord, ndarray, dask array or number whose data is used - as the second argument - new_dtype : - The expected dtype of the output. Used in the case of scalar masked arrays + as the second argument. new_unit : optional - Unit for the resulting quantity + Unit for the resulting quantity. + new_dtype : + The expected dtype of the output. Used in the case of scalar masked arrays. dim : optional Dimension along which to apply `other` if it's a coordinate that is - not found in `cube` + not found in `cube`. in_place : bool, default=False - whether or not to apply the operation in place to `cube` and `cube.data` + Whether or not to apply the operation in place to `cube` and `cube.data`. sanitise_metadata : bool, default=True - Whether or not to remove metadata using _sanitise_metadata function + Whether or not to remove metadata using _sanitise_metadata function. """ from iris.cube import Cube @@ -1169,13 +1170,13 @@ def __call__( other : optional A cube, coord, ndarray, dask array or number whose data is used as the second argument to the data function. 
- new_name : optional - Name for the resulting Cube. - in_place : bool, default=False - Whether to create a new Cube, or alter the given "cube". dim : optional Dimension along which to apply `other` if it's a coordinate that is - not found in `cube` + not found in `cube`. + in_place : bool, default=False + Whether to create a new Cube, or alter the given "cube". + new_name : optional + Name for the resulting Cube. **kwargs_data_func : Keyword arguments that get passed on to the data_func. diff --git a/lib/iris/analysis/trajectory.py b/lib/iris/analysis/trajectory.py index 1dd19cd724..2111dd2504 100644 --- a/lib/iris/analysis/trajectory.py +++ b/lib/iris/analysis/trajectory.py @@ -490,7 +490,7 @@ def _cartesian_sample_points(sample_points, sample_point_coord_names): [coord][datum] list of sample_positions for each datum, formatted for fast use of :func:`_ll_to_cart()`. sample_point_coord_names : - [coord] list of n coord names + [coord] list of n coord names. Returns ------- @@ -544,7 +544,7 @@ def _nearest_neighbour_indices_ndcoords(cube, sample_points, cache=None): Because this function can be slow for multidimensional coordinates, a 'cache' dictionary can be provided by the calling code. - .. Note:: + .. note:: If the points are longitudes/latitudes, these are handled correctly as points on the sphere, but the values must be in 'degrees'. @@ -745,7 +745,7 @@ def __init__(self, src_cube, target_grid_cube): Notes ----- - .. Note:: + .. note:: For latitude-longitude coordinates, the nearest-neighbour distances are computed on the sphere, otherwise flat Euclidean distances are diff --git a/lib/iris/aux_factory.py b/lib/iris/aux_factory.py index d63ab157fa..cd59575f93 100644 --- a/lib/iris/aux_factory.py +++ b/lib/iris/aux_factory.py @@ -57,7 +57,7 @@ def coord_system(self, value): @property def climatological(self): - """Always returns False, as a factory itself can never have points/bounds. + """Return False, as a factory itself can never have points/bounds. Always returns False, as a factory itself can never have points/bounds and therefore can never be climatological by definition. @@ -1787,9 +1787,9 @@ def _check_dependencies(s, c, eta, depth, depth_c): @property def dependencies(self): - """Returns a dicti mapping from constructor arg names to coordinates. + """Return a dicti mapping from constructor arg names to coordinates. - Returns a dictionary mapping from constructor argument names to + Return a dictionary mapping from constructor argument names to the corresponding coordinates. """ diff --git a/lib/iris/common/metadata.py b/lib/iris/common/metadata.py index 403436496f..3a5f4deede 100644 --- a/lib/iris/common/metadata.py +++ b/lib/iris/common/metadata.py @@ -734,7 +734,7 @@ def token(cls, name): Parameters ---------- name : str - The string name to verify + The string name to verify. Returns ------- diff --git a/lib/iris/common/resolve.py b/lib/iris/common/resolve.py index 5a96b52a02..aaf36b36cc 100644 --- a/lib/iris/common/resolve.py +++ b/lib/iris/common/resolve.py @@ -1639,7 +1639,7 @@ def _get_prepared_item( Boolean stating whether the ``metadata`` is from the ``src`` (``True``) or ``tgt`` :class:`~iris.cube.Cube`. Defaults to ``True``. - from_local: bool, default=False + from_local : bool, default=False Boolean controlling whether the ``metadata`` is used to search the ``category_local`` (``True``) or the :attr:`~iris.common.resolve.Resolve.prepared_category`. Defaults to ``False``. 
diff --git a/lib/iris/coord_categorisation.py b/lib/iris/coord_categorisation.py index f4c3aa6cb4..7ccee4fca8 100644 --- a/lib/iris/coord_categorisation.py +++ b/lib/iris/coord_categorisation.py @@ -34,14 +34,15 @@ def add_categorised_coord(cube, name, from_coord, category_function, units="1"): cube : :class:`iris.cube.Cube` The cube containing 'from_coord'. The new coord will be added into it. name : str - name of the created coordinate + Name of the created coordinate. from_coord : :class:`iris.coords.Coord` or str - coordinate in 'cube', or the name of one + Coordinate in 'cube', or the name of one. category_function : callable - function(coordinate, value), returning a category value for a - coordinate point-value + Function(coordinate, value), returning a category value for a + coordinate point-value. units : str, default="1" - units of the category value, typically 'no_unit' or '1'. + Units of the category value, typically 'no_unit' or '1'. + """ # Interpret coord, if given as a name if isinstance(from_coord, str): @@ -91,9 +92,9 @@ def _pt_date(coord, time): Parameters ---------- coord : Coord - coordinate (must be Time-type) + Coordinate (must be Time-type). time : float - value of a coordinate point + Value of a coordinate point. Returns ------- @@ -385,7 +386,7 @@ def add_season_year( List of seasons defined by month abbreviations. Each month must appear once and only once. Defaults to standard meteorological seasons (``djf``, ``mam``, ``jja``, ``son``). - use_year_at_season_start: bool, default=False + use_year_at_season_start : bool, default=False Seasons spanning the year boundary (e.g. Winter ``djf``) will belong fully to the following year by default (e.g. the year of Jan and Feb). Set to ``True`` for spanning seasons to belong to the preceding diff --git a/lib/iris/coord_systems.py b/lib/iris/coord_systems.py index 35eea98764..96f39c3f4b 100644 --- a/lib/iris/coord_systems.py +++ b/lib/iris/coord_systems.py @@ -161,9 +161,9 @@ def __init__( Axes of ellipsoid, in metres. At least one must be given (see note below). inverse_flattening : optional - Can be omitted if both axes given (see note below). Default 0.0 + Can be omitted if both axes given (see note below). Default 0.0. longitude_of_prime_meridian : optional - Specifies the prime meridian on the ellipsoid, in degrees. Default 0.0 + Specifies the prime meridian on the ellipsoid, in degrees. Default 0.0. Notes ----- @@ -357,14 +357,16 @@ def _crs(self): This property is created when required and then cached for speed. That cached value is cleared when an assignment is made to a property of the class that invalidates the cache. + """ return ccrs.Geodetic(self._globe) def _wipe_cached_properties(self): - """Wipes the cached properties on the object. + """Wipe the cached properties on the object. - Wipes the cached properties on the object as part of any update to a + Wipe the cached properties on the object as part of any update to a value that invalidates the cache. + """ try: delattr(self, "_crs") @@ -990,10 +992,10 @@ def __init__( Y offset from planar origin in metres. true_scale_lat : float, optional Latitude of true scale. - scale_factor_at_projection_origin : float, optional - Scale factor at the origin of the projection ellipsoid : :class:`GeogCS`, optional If given, defines the ellipsoid. + scale_factor_at_projection_origin : float, optional + Scale factor at the origin of the projection. Notes ----- @@ -1098,7 +1100,7 @@ def __init__( true_scale_lat : float, optional Latitude of true scale. 
scale_factor_at_projection_origin : float, optional - Scale factor at the origin of the projection + Scale factor at the origin of the projection. ellipsoid : :class:`GeogCS`, optional If given, defines the ellipsoid. @@ -1502,7 +1504,8 @@ class ObliqueMercator(CoordSystem): See Also -------- - :class:`RotatedMercator` + RotatedMercator : + :class:`ObliqueMercator` with ``azimuth_of_central_line=90``. """ @@ -1527,15 +1530,15 @@ def __init__( the centre line. latitude_of_projection_origin : float The true longitude of the central meridian in degrees. - longitude_of_projection_origin: float + longitude_of_projection_origin : float The true latitude of the planar origin in degrees. - false_easting: float, optional + false_easting : float, optional X offset from the planar origin in metres. Defaults to 0.0. - false_northing: float, optional + false_northing : float, optional Y offset from the planar origin in metres. Defaults to 0.0. - scale_factor_at_projection_origin: float, optional + scale_factor_at_projection_origin : float, optional Scale factor at the central meridian. Defaults to 1.0 . ellipsoid : :class:`GeogCS`, optional @@ -1615,6 +1618,8 @@ class RotatedMercator(ObliqueMercator): The Rotated Mercator projection is an Oblique Mercator projection with azimuth = +90. + Notes + ----- .. deprecated:: 3.8.0 This coordinate system was introduced as already scheduled for removal in a future release, since CF version 1.11 onwards now requires use of @@ -1639,18 +1644,18 @@ def __init__( ---------- latitude_of_projection_origin : float The true longitude of the central meridian in degrees. - longitude_of_projection_origin: float + longitude_of_projection_origin : float The true latitude of the planar origin in degrees. - false_easting: float, optional + false_easting : float, optional X offset from the planar origin in metres. Defaults to 0.0. - false_northing: float, optional + false_northing : float, optional Y offset from the planar origin in metres. Defaults to 0.0. - scale_factor_at_projection_origin: float, optional + scale_factor_at_projection_origin : float, optional Scale factor at the central meridian. Defaults to 1.0 . - ellipsoid: :class:`GeogCS`, optional + ellipsoid : :class:`GeogCS`, optional If given, defines the ellipsoid. """ diff --git a/lib/iris/coords.py b/lib/iris/coords.py index d9de063ea3..e32c6b0bf0 100644 --- a/lib/iris/coords.py +++ b/lib/iris/coords.py @@ -289,7 +289,7 @@ def summary( Returns ------- - result : str + str Output text, with embedded newlines when :attr:`shorten`\ =False. Notes @@ -1168,12 +1168,12 @@ def _get_2d_coord_bound_grid(bounds): Parameters ---------- bounds : array - Coordinate bounds array of shape (Y, X, 4) + Coordinate bounds array of shape (Y, X, 4). Returns ------- array - Grid of shape (Y+1, X+1) + Grid of shape (Y+1, X+1). """ # Check bds has the shape (ny, nx, 4) @@ -1952,7 +1952,7 @@ def is_contiguous(self, rtol=1e-05, atol=1e-08): contiguous = False return contiguous - def contiguous_bounds(self): + def contiguous_bounds(self): # numpydoc ignore=SS05 """Contiguous bounds of 1D coordinate. Return the N+1 bound values for a contiguous bounded 1D coordinate @@ -2614,7 +2614,7 @@ def __init__( #: Whether the coordinate wraps by ``coord.units.modulus``. self.circular = circular - def __deepcopy__(self, memo): + def __deepcopy__(self, memo): # numpydoc ignore=SS02 """coord.__deepcopy__() -> Deep copy of coordinate. Used if copy.deepcopy is called on a coordinate. 
diff --git a/lib/iris/cube.py b/lib/iris/cube.py index e77646993e..ca3039cb5b 100644 --- a/lib/iris/cube.py +++ b/lib/iris/cube.py @@ -169,14 +169,14 @@ def _repr_html_(self): def __add__(self, other): return CubeList(list.__add__(self, other)) - def __getitem__(self, keys): + def __getitem__(self, keys): # numpydoc ignore=SS02 """x.__getitem__(y) <==> x[y].""" result = super().__getitem__(keys) if isinstance(result, list): result = CubeList(result) return result - def __getslice__(self, start, stop): + def __getslice__(self, start, stop): # numpydoc ignore=SS02 """x.__getslice__(i, j) <==> x[i:j]. Use of negative indices is not supported. @@ -270,7 +270,8 @@ def extract_cube(self, constraint): See Also -------- - :meth:`~iris.cube.CubeList.extract` + iris.cube.CubeList.extract : + Filter each of the cubes which can be filtered by the given constraints. """ # Just validate this, so we can accept strings etc, but not multiples. @@ -293,7 +294,8 @@ def extract_cubes(self, constraints): See Also -------- - :meth:`~iris.cube.CubeList.extract` + iris.cube.CubeList.extract : + Filter each of the cubes which can be filtered by the given constraints. """ return self._extract_and_merge( @@ -717,7 +719,7 @@ def realise_data(self): # Compute these stats together (avoiding multiple data passes). CubeList([a_std, b_std, ab_mean_diff, std_err]).realise_data() - .. Note:: + .. note:: Cubes with non-lazy data are not affected. @@ -804,7 +806,7 @@ def __init__( Parameters ---------- combined : dict - values to init both 'self.globals' and 'self.locals'. If 'combined' itself + Values to init both 'self.globals' and 'self.locals'. If 'combined' itself has attributes named 'locals' and 'globals', these are used to update the respective content (after initially setting the individual ones). Otherwise, 'combined' is treated as a generic mapping, applied as @@ -812,9 +814,9 @@ def __init__( i.e. it will set locals and/or globals with the same logic as :meth:`~iris.cube.CubeAttrsDict.__setitem__` . locals : dict - initial content for 'self.locals' + Initial content for 'self.locals'. globals : dict - initial content for 'self.globals' + Initial content for 'self.globals'. Examples -------- @@ -1126,7 +1128,7 @@ def _sort_xml_attrs(cls, doc): Parameters ---------- - doc : :class:`xml.dom.minidom.Document`. + doc : :class:`xml.dom.minidom.Document` Returns ------- @@ -1209,7 +1211,7 @@ def __init__( units : optional The unit of the cube, e.g. ``"m s-1"`` or ``"kelvin"``. attributes : optional - A dictionary of cube attributes + A dictionary of cube attributes. cell_methods : optional A tuple of CellMethod objects, generally set by Iris, e.g. ``(CellMethod("mean", coords='latitude'), )``. @@ -1485,7 +1487,8 @@ def add_aux_coord(self, coord, data_dims=None): See Also -------- - :meth:`Cube.remove_coord()`. + remove_coord : + Remove a coordinate from the cube. """ if self.coords(coord): # TODO: just fail on duplicate object @@ -1626,7 +1629,8 @@ def add_cell_measure(self, cell_measure, data_dims=None): See Also -------- - :meth:`Cube.remove_cell_measure()`. + remove_cell_measure : + Remove a cell measure from the cube. """ if self.cell_measures(cell_measure): @@ -1646,7 +1650,7 @@ def add_ancillary_variable(self, ancillary_variable, data_dims=None): ---------- ancillary_variable : The :class:`iris.coords.AncillaryVariable` instance to be added to - the cube + the cube. data_dims : optional Integer or iterable of integers giving the data dimensions spanned by the ancillary variable. 
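The ``extract_cube`` / ``extract_cubes`` cross-references above are easiest to read with a usage sketch; the file path and constraint names are assumptions::

    import iris

    cubes = iris.load("hypothetical_input.nc")  # placeholder path
    theta = cubes.extract_cube("air_potential_temperature")  # exactly one match expected
    t, q = cubes.extract_cubes(["air_temperature", "specific_humidity"])  # one cube per constraint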
@@ -1656,6 +1660,7 @@ def add_ancillary_variable(self, ancillary_variable, data_dims=None): ValueError Raises a ValueError if an ancillary variable with identical metadata already exists on the cube. + """ if self.ancillary_variables(ancillary_variable): raise iris.exceptions.CannotAddError( @@ -1687,7 +1692,8 @@ def add_dim_coord(self, dim_coord, data_dim): See Also -------- - :meth:`Cube.remove_coord()`. + remove_coord : + Remove a coordinate from the cube. """ if self.coords(dim_coord): @@ -1767,8 +1773,10 @@ def remove_coord(self, coord): See Also -------- - :meth:`Cube.add_dim_coord()` - :meth:`Cube.add_aux_coord()` + add_dim_coord : + Add a CF coordinate to the cube. + add_aux_coord : + Add a CF auxiliary coordinate to the cube. """ coord = self.coord(coord) @@ -1795,15 +1803,14 @@ def remove_cell_measure(self, cell_measure): Notes ----- - .. note:: - - If the argument given does not represent a valid cell_measure on - the cube, an :class:`iris.exceptions.CellMeasureNotFoundError` - is raised. + If the argument given does not represent a valid cell_measure on + the cube, an :class:`iris.exceptions.CellMeasureNotFoundError` + is raised. See Also -------- - :meth:`Cube.add_cell_measure()` + add_cell_measure : + Add a CF cell measure to the cube. """ cell_measure = self.cell_measure(cell_measure) @@ -2092,7 +2099,8 @@ def coords( See Also -------- - :meth:`Cube.coord` for matching exactly one coordinate. + coord : + For matching exactly one coordinate. """ @@ -2255,7 +2263,9 @@ def coord( See Also -------- - :meth:`Cube.coords` for matching zero or more coordinates. + coords : + For matching zero or more coordinates. + """ coords = self.coords( name_or_coord=name_or_coord, @@ -2365,7 +2375,7 @@ def mesh(self): Returns ------- - mesh : :class:`iris.experimental.ugrid.mesh.Mesh` or None + :class:`iris.experimental.ugrid.mesh.Mesh` or None The mesh of the cube :class:`~iris.experimental.ugrid.MeshCoord`'s, or ``None``. @@ -2386,7 +2396,7 @@ def location(self): Returns ------- - location : str or None + str or None The mesh location of the cube :class:`~iris.experimental.ugrid.MeshCoords` (i.e. one of 'face' / 'edge' / 'node'), or ``None``. @@ -2406,7 +2416,7 @@ def mesh_dim(self): Returns ------- - mesh_dim : int or None + int or None The cube dimension which the cube :class:`~iris.experimental.ugrid.MeshCoord` map to, or ``None``. @@ -2435,7 +2445,8 @@ def cell_measures(self, name_or_cell_measure=None): See Also -------- - :meth:`Cube.cell_measure()`. + cell_measure : + Return a single cell_measure. """ name = None @@ -2469,7 +2480,7 @@ def cell_measure(self, name_or_cell_measure=None): See Also -------- - :meth:`Cube.cell_measures()` + cell_measures : For full keyword documentation. """ @@ -2522,7 +2533,8 @@ def ancillary_variables(self, name_or_ancillary_variable=None): See Also -------- - :meth:`Cube.ancillary_variable()`. + ancillary_variable : + Return a single ancillary_variable. """ name = None @@ -2556,7 +2568,7 @@ def ancillary_variable(self, name_or_ancillary_variable=None): See Also -------- - :meth:`Cube.ancillary_variables()` + ancillary_variables : For full keyword documentation. """ @@ -2716,7 +2728,7 @@ def data(self, data): self._data_manager.data = data def has_lazy_data(self): - """Details whether this :class:`~iris.cube.Cube` has lazy data. + """Detail whether this :class:`~iris.cube.Cube` has lazy data. 
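The one-versus-many distinction drawn between ``coord`` and ``coords`` above, plus ``has_lazy_data``, in practice (the file path is a placeholder)::

    import iris

    cube = iris.load_cube("hypothetical_input.nc")  # placeholder path
    y_coords = cube.coords(axis="Y")  # zero or more matches, returned as a list
    lat = cube.coord("latitude")      # exactly one match, otherwise an error is raised
    print(cube.has_lazy_data())       # True until the data have been realised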
Returns ------- @@ -3000,7 +3012,7 @@ def intersection(self, *args, **kwargs): ---------- coord : Either a :class:`iris.coords.Coord`, or coordinate name - (as defined in :meth:`iris.cube.Cube.coords()`) + (as defined in :meth:`iris.cube.Cube.coords()`). minimum : The minimum value of the range to select. maximum : @@ -3008,7 +3020,7 @@ def intersection(self, *args, **kwargs): min_inclusive : If True, coordinate values equal to `minimum` will be included in the selection. Default is True. - max_inclusive: + max_inclusive : If True, coordinate values equal to `maximum` will be included in the selection. Default is True. ignore_bounds : optional @@ -3380,7 +3392,7 @@ def slices_over(self, ref_to_slice): Parameters ---------- - ref_to_slice: str, coord, dimension index or a list of these + ref_to_slice : str, coord, dimension index or a list of these Determines which dimensions will be iterated along (i.e. the dimensions that are not returned in the subcubes). A mix of input types can also be provided. @@ -3406,7 +3418,8 @@ def slices_over(self, ref_to_slice): See Also -------- - :meth:`iris.cube.Cube.slices`. + iris.cube.Cube.slices : + Return an iterator of all subcubes given the coordinates or dimension indices. """ # Required to handle a mix between types. @@ -3450,7 +3463,7 @@ def slices(self, ref_to_slice, ordered=True): A mix of input types can also be provided. They must all be orthogonal (i.e. point to different dimensions). ordered : bool, default=True - if True, the order which the coords to slice or data_dims + If True, the order which the coords to slice or data_dims are given will be the order in which they represent the data in the resulting cube slices. If False, the order will follow that of the source cube. Default is True. @@ -3469,7 +3482,8 @@ def slices(self, ref_to_slice, ordered=True): See Also -------- - :meth:`iris.cube.Cube.slices_over`. + iris.cube.Cube.slices : + Return an iterator of all subcubes given the coordinates or dimension indices. """ if not isinstance(ordered, bool): diff --git a/lib/iris/experimental/animate.py b/lib/iris/experimental/animate.py index 5c9fa77bf8..13c1613802 100644 --- a/lib/iris/experimental/animate.py +++ b/lib/iris/experimental/animate.py @@ -8,15 +8,15 @@ ----- .. deprecated:: 3.4.0 -``iris.experimental.animate.animate()`` has been moved to -:func:`iris.plot.animate`. This module will therefore be removed in a future -release. + ``iris.experimental.animate.animate()`` has been moved to + :func:`iris.plot.animate`. This module will therefore be removed in a future + release. """ def animate(cube_iterator, plot_func, fig=None, **kwargs): - """Animates the given cube iterator. + """Animate the given cube iterator. Warnings -------- diff --git a/lib/iris/experimental/raster.py b/lib/iris/experimental/raster.py index ba7efc68b0..52ef2f651b 100644 --- a/lib/iris/experimental/raster.py +++ b/lib/iris/experimental/raster.py @@ -58,7 +58,7 @@ def _gdal_write_array(x_min, x_step, y_max, y_step, coord_system, data, fname, f coord_system : iris.coord_systems.CoordSystem Coordinate system for X and Y. data : numpy.ndarray - 2d array of values to export + 2d array of values to export. fname : str Output file name. ftype : str @@ -66,9 +66,7 @@ def _gdal_write_array(x_min, x_step, y_max, y_step, coord_system, data, fname, f Notes ----- - .. note:: - - Projection information is currently not written to the output. + Projection information is currently not written to the output. 
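The complementary ``slices`` / ``slices_over`` behaviour documented above, sketched with an assumed cube and coordinate names::

    import iris

    cube = iris.load_cube("hypothetical_4d_field.nc")  # placeholder path

    # slices: name the dimensions each subcube should keep...
    for xy in cube.slices(["latitude", "longitude"]):
        pass  # each `xy` is a 2D lat/lon subcube

    # ...slices_over: name the dimension(s) to iterate along instead.
    for t_slice in cube.slices_over("time"):
        pass  # each `t_slice` has no time dimension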
""" byte_order = data.dtype.str[0] @@ -109,14 +107,6 @@ def _gdal_write_array(x_min, x_step, y_max, y_step, coord_system, data, fname, f def export_geotiff(cube, fname): """Write cube data to raster file format as a PixelIsArea GeoTiff image. - .. deprecated:: 3.2.0 - - This method is scheduled to be removed in a future release, and no - replacement is currently planned. - If you make use of this functionality, please contact the Iris - Developers to discuss how to retain it (which could include reversing - the deprecation). - Parameters ---------- cube : Cube @@ -127,10 +117,16 @@ def export_geotiff(cube, fname): Notes ----- - .. note:: + For more details on GeoTiff specification and PixelIsArea, see: + https://www.remotesensing.org/geotiff/spec/geotiff2.5.html#2.5.2.2 + + .. deprecated:: 3.2.0 - For more details on GeoTiff specification and PixelIsArea, see: - https://www.remotesensing.org/geotiff/spec/geotiff2.5.html#2.5.2.2 + This method is scheduled to be removed in a future release, and no + replacement is currently planned. + If you make use of this functionality, please contact the Iris + Developers to discuss how to retain it (which could include reversing + the deprecation). """ wmsg = ( diff --git a/lib/iris/experimental/regrid.py b/lib/iris/experimental/regrid.py index c1d209cac0..835aa51368 100644 --- a/lib/iris/experimental/regrid.py +++ b/lib/iris/experimental/regrid.py @@ -4,9 +4,9 @@ # See LICENSE in the root of the repository for full licensing details. """Regridding functions. -.. note:: - - .. deprecated:: 3.2.0 +Notes +----- +.. deprecated:: 3.2.0 This package will be removed in a future release. The PointInCell class has now moved to :class:`iris.analysis.PointInCell`. @@ -57,33 +57,12 @@ def regrid_area_weighted_rectilinear_src_and_grid(src_cube, grid_cube, mdtol=0): mean of data values from src_grid regridded onto the horizontal grid of grid_cube. - .. note:: - - .. deprecated:: 3.2.0 - - This function is scheduled to be removed in a future release. - Please use :meth:`iris.cube.Cube.regrid` with the - :class:`iris.analysis.AreaWeighted` scheme instead : this is an exact - replacement. - - For example : - - .. code:: - - result = src_cube.regrid(grid_cube, AreaWeighted()) - This function requires that the horizontal grids of both cubes are rectilinear (i.e. expressed in terms of two orthogonal 1D coordinates) and that these grids are in the same coordinate system. This function also requires that the coordinates describing the horizontal grids all have bounds. - .. note:: - - Elements in data array of the returned cube that lie either partially - or entirely outside of the horizontal extent of the src_cube will - be masked irrespective of the value of mdtol. - Parameters ---------- src_cube : :class:`iris.cube.Cube` @@ -105,6 +84,25 @@ def regrid_area_weighted_rectilinear_src_and_grid(src_cube, grid_cube, mdtol=0): ------- A new :class:`iris.cube.Cube` instance. + Notes + ----- + Elements in data array of the returned cube that lie either partially + or entirely outside of the horizontal extent of the src_cube will + be masked irrespective of the value of mdtol. + + .. deprecated:: 3.2.0 + + This function is scheduled to be removed in a future release. + Please use :meth:`iris.cube.Cube.regrid` with the + :class:`iris.analysis.AreaWeighted` scheme instead : this is an exact + replacement. + + For example : + + .. 
code:: + + result = src_cube.regrid(grid_cube, AreaWeighted()) + """ wmsg = ( "The function " @@ -131,21 +129,6 @@ def regrid_weighted_curvilinear_to_rectilinear(src_cube, weights, grid_cube): mean of data values from :data:`src_cube` and the weights from :data:`weights` regridded onto the horizontal grid of :data:`grid_cube`. - .. note :: - - .. deprecated:: 3.2.0 - - This function is scheduled to be removed in a future release. - Please use :meth:`iris.cube.Cube.regrid` with the - :class:`iris.analysis.PointInCell` scheme instead : this is an exact - replacement. - - For example : - - .. code:: - - result = src_cube.regrid(grid_cube, PointInCell()) - This function requires that the :data:`src_cube` has a horizontal grid defined by a pair of X- and Y-axis coordinates which are mapped over the same cube dimensions, thus each point has an individually defined X and @@ -165,10 +148,10 @@ def regrid_weighted_curvilinear_to_rectilinear(src_cube, weights, grid_cube): :math:`\sum (src\_cube.data_{ij} * weights_{ij}) / \sum weights_{ij}`, for all :math:`ij` :data:`src_cube` points that are bound by that cell. - .. warning:: - - All coordinates that span the :data:`src_cube` that don't define - the horizontal curvilinear grid will be ignored. + Warnings + -------- + All coordinates that span the :data:`src_cube` that don't define + the horizontal curvilinear grid will be ignored. Parameters ---------- @@ -187,6 +170,21 @@ def regrid_weighted_curvilinear_to_rectilinear(src_cube, weights, grid_cube): ------- A :class:`iris.cube.Cube` instance. + Notes + ----- + .. deprecated:: 3.2.0 + + This function is scheduled to be removed in a future release. + Please use :meth:`iris.cube.Cube.regrid` with the + :class:`iris.analysis.PointInCell` scheme instead : this is an exact + replacement. + + For example : + + .. code:: + + result = src_cube.regrid(grid_cube, PointInCell()) + """ wmsg = ( "The function " @@ -341,7 +339,7 @@ def _regrid( projection, method, ): - """Regrids input data from the source to the target. Calculation is.""" + """Regrid input data from the source to the target. Calculation is.""" # Transform coordinates into the projection the interpolation will be # performed in. src_projection = src_x_coord.coord_system.as_cartopy_projection() @@ -622,9 +620,16 @@ def __init__(self, projection=None): Linear regridding scheme that uses scipy.interpolate.griddata on projected unstructured data. - .. note:: + Parameters + ---------- + projection : `cartopy.crs` instance, optional + The projection that the scipy calculation is performed in. + If None is given, a PlateCarree projection is used. Defaults to + None. - .. deprecated:: 3.2.0 + Notes + ----- + .. deprecated:: 3.2.0 This class is scheduled to be removed in a future release, and no replacement is currently planned. @@ -632,13 +637,6 @@ def __init__(self, projection=None): Developers to discuss how to retain it (which could include reversing the deprecation). - Parameters - ---------- - projection : `cartopy.crs instance`, optional - The projection that the scipy calculation is performed in. - If None is given, a PlateCarree projection is used. Defaults to - None. - """ self.projection = projection wmsg = ( @@ -696,7 +694,7 @@ class ProjectedUnstructuredNearest: The source cube and the target cube will be projected into a common projection for the scipy calculation to be performed. - .. Note:: + .. note:: The :class:`iris.analysis.UnstructuredNearest` scheme performs essentially the same job. 
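For convenience, the two ``Cube.regrid`` replacements named in the deprecation notes above, side by side; ``src_cube``, ``grid_cube`` and ``weights`` are assumed to exist already::

    from iris.analysis import AreaWeighted, PointInCell

    # Replacement for regrid_area_weighted_rectilinear_src_and_grid
    # (mdtol=0 mirrors that function's documented default).
    area_result = src_cube.regrid(grid_cube, AreaWeighted(mdtol=0))

    # Replacement for regrid_weighted_curvilinear_to_rectilinear.
    point_result = src_cube.regrid(grid_cube, PointInCell(weights))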
That calculation is more rigorously correct and may be applied to larger data regions (including global). @@ -707,9 +705,16 @@ class ProjectedUnstructuredNearest: def __init__(self, projection=None): """Nearest regridding scheme that uses scipy.interpolate.griddata on projected unstructured data. - .. note:: + Parameters + ---------- + projection : `cartopy.crs instance`, optional + The projection that the scipy calculation is performed in. + If None is given, a PlateCarree projection is used. Defaults to + None. - .. deprecated:: 3.2.0 + Notes + ----- + .. deprecated:: 3.2.0 This class is scheduled to be removed in a future release, and no exact replacement is currently planned. @@ -718,13 +723,6 @@ def __init__(self, projection=None): contact the Iris Developers to discuss how to retain it (which could include reversing the deprecation). - Parameters - ---------- - projection : `cartopy.crs instance`, optional - The projection that the scipy calculation is performed in. - If None is given, a PlateCarree projection is used. Defaults to - None. - """ self.projection = projection wmsg = ( diff --git a/lib/iris/experimental/regrid_conservative.py b/lib/iris/experimental/regrid_conservative.py index e15b1c29a5..c4dbf965f8 100644 --- a/lib/iris/experimental/regrid_conservative.py +++ b/lib/iris/experimental/regrid_conservative.py @@ -4,9 +4,7 @@ # See LICENSE in the root of the repository for full licensing details. """Support for conservative regridding via ESMPy. -.. note:: - - .. deprecated:: 3.2.0 +.. deprecated:: 3.2.0 This package will be removed in a future release. Please use @@ -143,22 +141,6 @@ def _make_esmpy_field(x_coord, y_coord, ref_name="field", data=None, mask=None): def regrid_conservative_via_esmpy(source_cube, grid_cube): """Perform a conservative regridding with ESMPy. - .. note :: - - .. deprecated:: 3.2.0 - - This function is scheduled to be removed in a future release. - Please use - `iris-esmf-regrid `_ - instead. - - For example : - - .. code:: - - from emsf_regrid.schemes import ESMFAreaWeighted - result = src_cube.regrid(grid_cube, ESMFAreaWeighted()) - Regrids the data of a source cube onto a new grid defined by a destination cube. @@ -185,13 +167,10 @@ def regrid_conservative_via_esmpy(source_cube, grid_cube): the reference surfaces are also regridded, using ordinary bilinear interpolation. - .. note:: - - Both source and destination cubes must have two dimension coordinates - identified with axes 'X' and 'Y' which share a coord_system with a - Cartopy CRS. - The grids are defined by :meth:`iris.coords.Coord.contiguous_bounds` of - these. + Both source and destination cubes must have two dimension coordinates + identified with axes 'X' and 'Y' which share a coord_system with a + Cartopy CRS. The grids are defined by :meth:`iris.coords.Coord.contiguous_bounds` + of these. .. note:: @@ -200,6 +179,20 @@ def regrid_conservative_via_esmpy(source_cube, grid_cube): To alter this, make a prior call to ESMF.Manager(). + .. deprecated:: 3.2.0 + + This function is scheduled to be removed in a future release. + Please use + `iris-esmf-regrid `_ + instead. + + For example : + + .. 
code:: + + from emsf_regrid.schemes import ESMFAreaWeighted + result = src_cube.regrid(grid_cube, ESMFAreaWeighted()) + """ wmsg = ( "The function " diff --git a/lib/iris/experimental/representation.py b/lib/iris/experimental/representation.py index 4ffe176e3a..0648cc8e0d 100644 --- a/lib/iris/experimental/representation.py +++ b/lib/iris/experimental/representation.py @@ -209,13 +209,13 @@ def _make_row(self, title, body=None, col_span=0): Parameters ---------- - body : str, optional - Contains the content for each cell not in the left-most (title) column. - If None, indicates this row is a title row (see below). - title : stre, optional + title : str, optional Contains the row heading. If `body` is None, indicates that the row contains a sub-heading; e.g. 'Dimension coordinates:'. + body : str, optional + Contains the content for each cell not in the left-most (title) column. + If None, indicates this row is a title row (see below). col_span : int, default=0 Indicates how many columns the string should span. diff --git a/lib/iris/experimental/ugrid/mesh.py b/lib/iris/experimental/ugrid/mesh.py index a2519f250b..db000b5c73 100644 --- a/lib/iris/experimental/ugrid/mesh.py +++ b/lib/iris/experimental/ugrid/mesh.py @@ -933,7 +933,7 @@ def summary(self, shorten=False): Returns ------- - result : str + str """ if shorten: @@ -1888,7 +1888,7 @@ def to_MeshCoords(self, location): Returns ------- tuple of :class:`~iris.experimental.ugrid.mesh.MeshCoord` - tuple of :class:`~iris.experimental.ugrid.mesh.MeshCoord` + Tuple of :class:`~iris.experimental.ugrid.mesh.MeshCoord` referencing the current :class:`Mesh`. One for each value in :attr:`AXES`, using the value for the ``axis`` argument. @@ -2957,8 +2957,9 @@ def _construct_access_arrays(self): Returns ------- - points, bounds : array or None - lazy arrays which calculate the correct points and bounds from the + array or None + Tuple of (points, bounds). + Lazy arrays which calculate the correct points and bounds from the Mesh data, based on the location and axis. The Mesh coordinates accessed are not identified on construction, but discovered from the Mesh at the time of calculation, so that diff --git a/lib/iris/fileformats/_ff.py b/lib/iris/fileformats/_ff.py index 18c1aa766e..35b4f65bb7 100644 --- a/lib/iris/fileformats/_ff.py +++ b/lib/iris/fileformats/_ff.py @@ -841,9 +841,9 @@ def load_cubes(filenames, callback, constraints=None): Parameters ---------- filenames : - List of fields files filenames to load + List of fields files filenames to load. callback : - A function which can be passed on to :func:`iris.io.run_callback` + A function which can be passed on to :func:`iris.io.run_callback`. Notes ----- @@ -864,8 +864,8 @@ def load_cubes_32bit_ieee(filenames, callback, constraints=None): See Also -------- - :func:`load_cubes` - For keyword details + :func:`load_cubes` : + For keyword details. """ return pp._load_cubes_variable_loader( diff --git a/lib/iris/fileformats/_ff_cross_references.py b/lib/iris/fileformats/_ff_cross_references.py index b060ed42e9..6e9ce16363 100644 --- a/lib/iris/fileformats/_ff_cross_references.py +++ b/lib/iris/fileformats/_ff_cross_references.py @@ -2,9 +2,9 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -""" -Table providing UM grid-code, field-code and pseudolevel-type for (some) -stash codes. Used in UM file i/o. +"""Table providing UM grid-code, field-code and pseudolevel-type for (some) stash codes. 
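One caution on the replacement snippet carried over in the ESMPy deprecation note above: the ``emsf_regrid`` import spelling is a long-standing typo in the original docstring. The iris-esmf-regrid package is imported as ``esmf_regrid``, so the intended call is closer to the following (``src_cube`` and ``grid_cube`` are assumed to exist)::

    from esmf_regrid.schemes import ESMFAreaWeighted

    result = src_cube.regrid(grid_cube, ESMFAreaWeighted())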
+ +Used in UM file i/o. """ diff --git a/lib/iris/fileformats/_nc_load_rules/helpers.py b/lib/iris/fileformats/_nc_load_rules/helpers.py index dc68274a36..f6e3985e37 100644 --- a/lib/iris/fileformats/_nc_load_rules/helpers.py +++ b/lib/iris/fileformats/_nc_load_rules/helpers.py @@ -282,7 +282,7 @@ def _split_cell_methods(nc_cell_methods: str) -> List[re.Match]: Returns ------- nc_cell_methods_matches: list of re.Match objects - A list of re.Match objects associated with each parsed cell method + A list of re.Match objects associated with each parsed cell method. Notes ----- diff --git a/lib/iris/fileformats/_structured_array_identification.py b/lib/iris/fileformats/_structured_array_identification.py index 8dada77458..0f386e9815 100644 --- a/lib/iris/fileformats/_structured_array_identification.py +++ b/lib/iris/fileformats/_structured_array_identification.py @@ -300,7 +300,7 @@ class GroupStructure: """ def __init__(self, length, component_structure, array_order="c"): - """group_component_to_array - a dictionary. See also TODO.""" + """Group_component_to_array - a dictionary. See also TODO.""" #: The size common to all of the original arrays and used to determine #: possible shape configurations. self.length = length diff --git a/lib/iris/fileformats/abf.py b/lib/iris/fileformats/abf.py index 6dd8dfd14f..1ac95a42eb 100644 --- a/lib/iris/fileformats/abf.py +++ b/lib/iris/fileformats/abf.py @@ -200,9 +200,9 @@ def load_cubes(filespecs, callback=None): Parameters ---------- filenames : - List of ABF filenames to load + List of ABF filenames to load. callback : optional - A function that can be passed to :func:`iris.io.run_callback` + A function that can be passed to :func:`iris.io.run_callback`. Notes ----- diff --git a/lib/iris/fileformats/dot.py b/lib/iris/fileformats/dot.py index 53f85794c6..8405368ade 100644 --- a/lib/iris/fileformats/dot.py +++ b/lib/iris/fileformats/dot.py @@ -57,13 +57,14 @@ def save(cube, target): Parameters ---------- - cube: :class:`iris.cube.Cube`. + cube : :class:`iris.cube.Cube` target : A filename or open file handle. See Also -------- - :func:`iris.io.save`. + iris.io.save : + Save one or more Cubes to file (or other writeable). """ if isinstance(target, str): @@ -90,7 +91,7 @@ def save_png(source, target, launch=False): Parameters ---------- - source: :class:`iris.cube.Cube`, or dot filename. + source : :class:`iris.cube.Cube`, or dot filename target : A filename or open file handle. If passing a file handle, take care to open it for binary output. @@ -99,7 +100,8 @@ def save_png(source, target, launch=False): See Also -------- - :func:`iris.io.save`. + iris.io.save : + Save one or more Cubes to file (or other writeable). """ # From cube or dot file? diff --git a/lib/iris/fileformats/netcdf/_thread_safe_nc.py b/lib/iris/fileformats/netcdf/_thread_safe_nc.py index 9aafbf312d..b87ffde145 100644 --- a/lib/iris/fileformats/netcdf/_thread_safe_nc.py +++ b/lib/iris/fileformats/netcdf/_thread_safe_nc.py @@ -50,7 +50,7 @@ def is_contained_type(cls, instance): @classmethod def from_existing(cls, instance): - """Pass an existing instance to __init__, where it is contained.""" + """Routine to pass an existing instance to __init__, where it is contained.""" assert cls.is_contained_type(instance) return cls(instance) @@ -122,7 +122,7 @@ def setncattr(self, *args, **kwargs) -> None: @property def dimensions(self) -> typing.List[str]: - """Calls netCDF4.Variable.dimensions within _GLOBAL_NETCDF4_LOCK. + """Call netCDF4.Variable.dimensions within _GLOBAL_NETCDF4_LOCK. 
Only defined explicitly in order to get some mocks to work. """ @@ -162,7 +162,7 @@ class GroupWrapper(_ThreadSafeWrapper): @property def dimensions(self) -> typing.Dict[str, DimensionWrapper]: - """Calls dimensions of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. + """Call dimensions of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. Calls dimensions of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning DimensionWrappers. The original returned netCDF4.Dimensions @@ -191,7 +191,7 @@ def createDimension(self, *args, **kwargs) -> DimensionWrapper: @property def variables(self) -> typing.Dict[str, VariableWrapper]: - """Calls variables of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. + """Call variables of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. Calls variables of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning VariableWrappers. The original returned netCDF4.Variables @@ -238,7 +238,7 @@ def get_variables_by_attributes( @property def groups(self): - """Calls groups of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. + """Call groups of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. Calls groups of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning GroupWrappers. @@ -253,7 +253,7 @@ def groups(self): @property def parent(self): - """Calls parent of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. + """Call parent of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK. Calls parent of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning a GroupWrapper. @@ -261,6 +261,7 @@ def parent(self): The original returned netCDF4.Group is simply replaced with its respective GroupWrapper, ensuring that downstream calls are also performed within _GLOBAL_NETCDF4_LOCK. + """ with _GLOBAL_NETCDF4_LOCK: parent_ = self._contained_instance.parent diff --git a/lib/iris/fileformats/netcdf/loader.py b/lib/iris/fileformats/netcdf/loader.py index 4e205ad7f3..ca2d830281 100644 --- a/lib/iris/fileformats/netcdf/loader.py +++ b/lib/iris/fileformats/netcdf/loader.py @@ -515,9 +515,10 @@ def _translate_constraints_to_var_callback(constraints): Returns ------- - function : (cf_var:CFDataVariable) - bool, or None. + bool or None + Notes + ----- For now, ONLY handles a single NameConstraint with no 'STASH' component. """ @@ -705,7 +706,7 @@ def set( Parameters ---------- var_names : str or list of str, default=None - apply the `dimension_chunksizes` controls only to these variables, + Apply the `dimension_chunksizes` controls only to these variables, or when building :class:`~iris.cube.Cube` from these data variables. If ``None``, settings apply to all loaded variables. **dimension_chunksizes : dict of {str: int} @@ -794,7 +795,7 @@ def from_file(self) -> None: @contextmanager def as_dask(self) -> None: - """Relies on Dask :external+dask:doc:`array` to control chunk sizes. + """Rely on Dask :external+dask:doc:`array` to control chunk sizes. Notes ----- diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py index 42616d7fd1..7c24be0a6d 100644 --- a/lib/iris/fileformats/netcdf/saver.py +++ b/lib/iris/fileformats/netcdf/saver.py @@ -297,10 +297,10 @@ def _data_fillvalue_check(arraylib, data, check_value): arraylib : module Either numpy or dask.array : When dask, results are lazy computations. data : array-like - Array to check (numpy or dask) + Array to check (numpy or dask). check_value : number or None If not None, fill-value to check for existence in the array. 
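A sketch of the ``CHUNK_CONTROL.set`` interface documented above, assuming it is used as a context manager around the load call in the same way as the neighbouring ``from_file`` / ``as_dask`` modes; the variable name, chunk sizes and file path are placeholders::

    import iris
    from iris.fileformats.netcdf.loader import CHUNK_CONTROL

    with CHUNK_CONTROL.set("air_temperature", time=180, latitude=90):
        cube = iris.load_cube("hypothetical_input.nc")  # placeholder path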
- If None, do not do value-in-array check + If None, do not do value-in-array check. Returns ------- @@ -335,18 +335,18 @@ def _fillvalue_report(fill_info, is_masked, contains_fill_value, warn=False): Parameters ---------- fill_info : _FillvalueCheckInfo - A named-tuple containing the context of the fill-value check + A named-tuple containing the context of the fill-value check. is_masked : bool - whether the data array was masked + Whether the data array was masked. contains_fill_value : bool - whether the data array contained the fill-value + Whether the data array contained the fill-value. warn : bool, default=False - if True, also issue any resulting warning immediately. + If True, also issue any resulting warning immediately. Returns ------- None or :class:`Warning` - If not None, indicates a known or possible problem with filling + If not None, indicates a known or possible problem with filling. """ varname = fill_info.varname @@ -844,21 +844,22 @@ def _add_mesh(self, cube_or_mesh): Add the cube's mesh, and all related variables to the dataset. Includes all the mesh-element coordinate and connectivity variables. - ..note:: + .. note:: Here, we do *not* add the relevant referencing attributes to the data-variable, because we want to create the data-variable later. Parameters ---------- - cube_or_mesh : :class:`iris.cube.Cube`or :class:`iris.experimental.ugrid.Mesh` + cube_or_mesh : :class:`iris.cube.Cube` or :class:`iris.experimental.ugrid.Mesh` The Cube or Mesh being saved to the netCDF file. Returns ------- - cf_mesh_name : str or None + str or None The name of the mesh variable created, or None if the cube does not have a mesh. + """ cf_mesh_name = None @@ -1021,7 +1022,7 @@ def _add_aux_coords(self, cube, cf_var_cube, dimension_names): cube : :class:`iris.cube.Cube` A :class:`iris.cube.Cube` to be saved to a netCDF file. cf_var_cube : :class:`netcdf.netcdf_variable` - cf variable cube representation. + A cf variable cube representation. dimension_names : list Names associated with the dimensions of the cube. """ @@ -1061,7 +1062,7 @@ def _add_cell_measures(self, cube, cf_var_cube, dimension_names): cube : :class:`iris.cube.Cube` A :class:`iris.cube.Cube` to be saved to a netCDF file. cf_var_cube : :class:`netcdf.netcdf_variable` - cf variable cube representation. + A cf variable cube representation. dimension_names : list Names associated with the dimensions of the cube. """ @@ -1080,7 +1081,7 @@ def _add_ancillary_variables(self, cube, cf_var_cube, dimension_names): cube : :class:`iris.cube.Cube` A :class:`iris.cube.Cube` to be saved to a netCDF file. cf_var_cube : :class:`netcdf.netcdf_variable` - cf variable cube representation. + A cf variable cube representation. dimension_names : list Names associated with the dimensions of the cube. """ @@ -1122,7 +1123,7 @@ def _add_aux_factories(self, cube, cf_var_cube, dimension_names): ---------- cube : :class:`iris.cube.Cube` A :class:`iris.cube.Cube` to be saved to a netCDF file. - cf_var_cube: :class:`netcdf.netcdf_variable` + cf_var_cube : :class:`netcdf.netcdf_variable` CF variable cube representation. dimension_names : list Names associated with the dimensions of the cube. @@ -1212,7 +1213,7 @@ def _get_dim_names(self, cube_or_mesh): mesh_dimensions : list of str A list of the mesh dimensions of the attached mesh, if any. cube_dimensions : list of str - A lists of dimension names for each dimension of the cube + A lists of dimension names for each dimension of the cube. 
Notes ----- @@ -1418,12 +1419,12 @@ def cf_valid_var_name(var_name): Parameters ---------- var_name : str - The var_name to normalise + The var_name to normalise. Returns ------- str - var_name suitable for passing through for variable creation. + The var_name suitable for passing through for variable creation. """ # Replace invalid characters with an underscore ("_"). @@ -1499,9 +1500,9 @@ def _create_cf_bounds(self, coord, cf_var, cf_name): coord : :class:`iris.coords.Coord` A coordinate of a cube. cf_var : - CF-netCDF variable + CF-netCDF variable. cf_name : str - name of the CF-NetCDF variable. + Name of the CF-NetCDF variable. Returns ------- @@ -1750,7 +1751,7 @@ def _create_generic_cf_array_var( Create the associated CF-netCDF variable in the netCDF dataset for the given dimensional_metadata. - ..note:: + .. note:: If the metadata element is a coord, it may also contain bounds. In which case, an additional var is created and linked to it. @@ -1952,7 +1953,7 @@ def _create_cf_grid_mapping(self, cube, cf_var_cube): A :class:`iris.cube.Cube`, :class:`iris.cube.CubeList` or list of cubes to be saved to a netCDF file. cf_var_cube : :class:`netcdf.netcdf_variable` - cf variable cube representation. + A cf variable cube representation. Returns ------- @@ -2202,11 +2203,11 @@ def _create_cf_data_variable( dimension_names : list String names for each dimension of the cube. local_keys : iterable of str, optional - See :func:`iris.fileformats.netcdf.Saver.write` + See :func:`iris.fileformats.netcdf.Saver.write`. packing : type or str or dict or list, optional - See :func:`iris.fileformats.netcdf.Saver.write` + See :func:`iris.fileformats.netcdf.Saver.write`. fill_value : optional - See :func:`iris.fileformats.netcdf.Saver.write` + See :func:`iris.fileformats.netcdf.Saver.write`. Notes ----- @@ -2549,7 +2550,7 @@ def complete(self, issue_warnings=True) -> List[Warning]: Returns ------- - warnings : list of Warning + list of Warning Any warnings that were raised while writing delayed data. """ @@ -2618,7 +2619,7 @@ def save( Name of the netCDF file to save the cube(s). **Or** an open, writeable :class:`netCDF4.Dataset`, or compatible object. - .. Note:: + .. note:: When saving to a dataset, ``compute`` **must** be ``False`` : See the ``compute`` parameter. @@ -2735,7 +2736,7 @@ def save( Returns ------- - result : None or dask.delayed.Delayed + None or dask.delayed.Delayed If `compute=True`, returns `None`. Otherwise returns a :class:`dask.delayed.Delayed`, which implements delayed writing to fill in the variables data. diff --git a/lib/iris/fileformats/nimrod.py b/lib/iris/fileformats/nimrod.py index 55927df3ef..d318c94882 100644 --- a/lib/iris/fileformats/nimrod.py +++ b/lib/iris/fileformats/nimrod.py @@ -295,15 +295,13 @@ def load_cubes(filenames, callback=None): Parameters ---------- filenames : - List of NIMROD filenames to load + List of NIMROD filenames to load. callback : optional - A function which can be passed on to :func:`iris.io.run_callback` + A function which can be passed on to :func:`iris.io.run_callback`. Notes ----- - .. note:: - - The resultant cubes may not be in the same order as in the files. + The resultant cubes may not be in the same order as in the files. 
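The ``compute=False`` behaviour described for the netCDF ``save`` function above, in a minimal sketch; ``cube`` is assumed to exist and the output path is a placeholder::

    from iris.fileformats.netcdf import save

    delayed = save(cube, "hypothetical_output.nc", compute=False)
    # ... later, or under a dask scheduler of your choosing:
    delayed.compute()  # perform the deferred writes of the variable data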
""" if isinstance(filenames, str): diff --git a/lib/iris/fileformats/nimrod_load_rules.py b/lib/iris/fileformats/nimrod_load_rules.py index 16f23c8a6f..2c0b4334db 100644 --- a/lib/iris/fileformats/nimrod_load_rules.py +++ b/lib/iris/fileformats/nimrod_load_rules.py @@ -654,7 +654,7 @@ def add_attr(item): def known_threshold_coord(field): - """Supplies known threshold coord meta-data for known use cases. + """Supply known threshold coord meta-data for known use cases. threshold_value_alt exists because some meta-data are mis-assigned in the Nimrod data. @@ -895,7 +895,7 @@ def run(field, handle_metadata_errors=True): field : :class:`~iris.fileformats.nimrod.NimrodField` handle_metadata_errors : bool, default=True Set to False to omit handling of known meta-data deficiencies - in Nimrod-format data + in Nimrod-format data. Returns ------- diff --git a/lib/iris/fileformats/pp.py b/lib/iris/fileformats/pp.py index c39c1a53a7..e40b71eff5 100644 --- a/lib/iris/fileformats/pp.py +++ b/lib/iris/fileformats/pp.py @@ -382,8 +382,10 @@ def __init__(self, value, name_mapping_dict=None): ---------- name_mapping_dict : dict A special mapping to provide name based access to specific - integer positions: + integer positions. + Examples + -------- >>> a = SplittableInt(1234, {'hundreds': 2}) >>> print(a.hundreds) 2 @@ -1457,7 +1459,7 @@ class PPField2(PPField): @property def t1(self): - """cftime.datetime object. + """A cftime.datetime object. cftime.datetime object consisting of the lbyr, lbmon, lbdat, lbhr, and lbmin attributes. @@ -1490,7 +1492,7 @@ def t1(self, dt): @property def t2(self): - """cftime.datetime object. + """A cftime.datetime object. cftime.datetime object consisting of the lbyrd, lbmond, lbdatd, lbhrd, and lbmind attributes. @@ -1532,7 +1534,7 @@ class PPField3(PPField): @property def t1(self): - """cftime.datetime object. + """A cftime.datetime object. cftime.datetime object consisting of the lbyr, lbmon, lbdat, lbhr, lbmin, and lbsec attributes. @@ -1566,7 +1568,7 @@ def t1(self, dt): @property def t2(self): - """cftime.datetime object. + """A cftime.datetime object. cftime.datetime object consisting of the lbyrd, lbmond, lbdatd, lbhrd, lbmind, and lbsecd attributes. @@ -1620,7 +1622,7 @@ def load(filename, read_data=False, little_ended=False): Parameters ---------- filename : str - string of the filename to load. + String of the filename to load. read_data : bool, default=False Flag whether or not the data should be read, if False an empty data manager will be provided which can subsequently load the data @@ -2005,11 +2007,11 @@ def load_cubes(filenames, callback=None, constraints=None): Parameters ---------- filenames : - list of pp filenames to load - constraints : optional - A list of Iris constraints + List of pp filenames to load. callback : optional - A function which can be passed on to :func:`iris.io.run_callback` + A function which can be passed on to :func:`iris.io.run_callback`. + constraints : optional + A list of Iris constraints. Notes ----- @@ -2029,11 +2031,11 @@ def load_cubes_little_endian(filenames, callback=None, constraints=None): Parameters ---------- filenames : - list of pp filenames to load - constraints : optional - a list of Iris constraints + List of pp filenames to load. callback : optional - a function which can be passed on to :func:`iris.io.run_callback` + A function which can be passed on to :func:`iris.io.run_callback`. + constraints : optional + A list of Iris constraints. 
Notes ----- @@ -2161,7 +2163,7 @@ def save(cube, target, append=False, field_coords=None): handle. Default is False. field_coords : optional - list of 2 coords or coord names which are to be used + List of 2 coords or coord names which are to be used for reducing the given cube into 2d slices, which will ultimately determine the x and y coordinates of the resulting fields. @@ -2188,7 +2190,7 @@ def save_pairs_from_cube(cube, field_coords=None, target=None): Parameters ---------- cube : - A :class:`iris.cube.Cube` + A :class:`iris.cube.Cube`. field_coords : optional List of 2 coords or coord names which are to be used for reducing the given cube into 2d slices, which will ultimately @@ -2339,7 +2341,8 @@ def save_fields(fields, target, append: bool = False): See Also -------- - :func:`iris.io.save`. + iris.io.save : + Save one or more Cubes to file (or other writeable). """ # Open issues diff --git a/lib/iris/fileformats/pp_load_rules.py b/lib/iris/fileformats/pp_load_rules.py index 8343afab40..71540fe74a 100644 --- a/lib/iris/fileformats/pp_load_rules.py +++ b/lib/iris/fileformats/pp_load_rules.py @@ -308,7 +308,7 @@ def _reshape_vector_args(values_and_dims): Returns ------- - reshaped_arrays : iterable of arrays + iterable object of arrays The inputs, transposed and reshaped onto common target dimensions. """ @@ -357,7 +357,7 @@ def _collapse_degenerate_points_and_bounds(points, bounds=None, rtol=1.0e-7): bounds : :class:`numpy.ndarray`, optional Array of bounds values. This array should have an additional vertex dimension (typically of length 2) when compared to the points array - i.e. bounds.shape = points.shape + (nvertex,) + i.e. bounds.shape = points.shape + (nvertex,). rtol : optional, default=1.0e-7 Returns @@ -451,18 +451,18 @@ def _new_coord_and_dims( * dimensions with all points and bounds values the same are removed. * the result coordinate may be an AuxCoord if a DimCoord cannot be made - (e.g. if values are non-monotonic). + (e.g. if values are non-monotonic). Parameters ---------- is_vector_operation : bool If True, perform 'vector' style operation. - points : array-like - Coordinate point values. name : str Standard name of coordinate. units : str or cf_unit.Unit Units of coordinate. + points : array-like + Coordinate point values. lower_and_upper_bounds : pair of array-like or None, optional Corresponding bounds values (lower, upper), if any. @@ -932,7 +932,7 @@ def convert(f): def _all_other_rules(f): - """Deals with all the other rules. + """Deal with all the other rules. Deals with all the other rules that have not been factored into any of the other convert_scalar_coordinate functions above. diff --git a/lib/iris/fileformats/pp_save_rules.py b/lib/iris/fileformats/pp_save_rules.py index 73331268e4..b8e95d2160 100644 --- a/lib/iris/fileformats/pp_save_rules.py +++ b/lib/iris/fileformats/pp_save_rules.py @@ -26,7 +26,7 @@ def _basic_coord_system_rules(cube, pp): - """Rules for setting the coord system of the PP field. + """Rule for setting the coord system of the PP field. Parameters ---------- @@ -80,7 +80,7 @@ def _um_version_rules(cube, pp): def _stash_rules(cube, pp): - """Attributes rules for setting the STASH attribute of the PP field. + """Attribute rules for setting the STASH attribute of the PP field. Parameters ---------- @@ -103,7 +103,7 @@ def _stash_rules(cube, pp): def _general_time_rules(cube, pp): - """Rules for setting time metadata of the PP field. + """Rule for setting time metadata of the PP field. 
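Returning to the PP ``save`` signature in the hunk above, the ``field_coords`` argument in use; ``cube`` is assumed to exist and the output path is a placeholder::

    import iris.fileformats.pp as pp

    # Write the cube as a sequence of 2D PP fields sliced over latitude/longitude.
    pp.save(cube, "hypothetical_output.pp", field_coords=["latitude", "longitude"])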
Parameters ---------- @@ -377,7 +377,7 @@ def _general_time_rules(cube, pp): def _calendar_rules(cube, pp): - """Rules for setting the calendar of the PP field. + """Rule for setting the calendar of the PP field. Parameters ---------- @@ -403,7 +403,7 @@ def _calendar_rules(cube, pp): def _grid_and_pole_rules(cube, pp): - """Rules for setting the horizontal grid and pole location of the PP field. + """Rule for setting the horizontal grid and pole location of the PP field. Parameters ---------- @@ -485,7 +485,7 @@ def _grid_and_pole_rules(cube, pp): def _non_std_cross_section_rules(cube, pp): - """Rules for applying non-standard cross-sections to the PP field. + """Rule for applying non-standard cross-sections to the PP field. Parameters ---------- @@ -616,7 +616,7 @@ def _non_std_cross_section_rules(cube, pp): def _lbproc_rules(cube, pp): - """Rules for setting the processing code of the PP field. + """Rule for setting the processing code of the PP field. Note: `pp.lbproc` must be set to 0 before these rules are run. @@ -664,7 +664,7 @@ def _lbproc_rules(cube, pp): def _vertical_rules(cube, pp): - """Rules for setting vertical levels for the PP field. + """Rule for setting vertical levels for the PP field. Parameters ---------- @@ -849,7 +849,7 @@ def _vertical_rules(cube, pp): def _all_other_rules(cube, pp): - """Fields currently managed by these rules. + """Field currently managed by these rules. * lbfc (field code) * lbrsvd[3] (ensemble member number) diff --git a/lib/iris/fileformats/um/_ff_replacement.py b/lib/iris/fileformats/um/_ff_replacement.py index 52afe343c3..e36260a335 100644 --- a/lib/iris/fileformats/um/_ff_replacement.py +++ b/lib/iris/fileformats/um/_ff_replacement.py @@ -56,9 +56,9 @@ def load_cubes(filenames, callback, constraints=None, _loader_kwargs=None): Parameters ---------- filenames : - list of filenames to load + List of filenames to load. callback : optional - A function which can be passed on to :func:`iris.io.run_callback` + A function which can be passed on to :func:`iris.io.run_callback`. constraints : optional _loader_kwargs : optional @@ -86,7 +86,7 @@ def load_cubes_32bit_ieee(filenames, callback, constraints=None): See Also -------- :func:`load_cubes` - For keyword details + For keyword details. """ return load_cubes( diff --git a/lib/iris/io/__init__.py b/lib/iris/io/__init__.py index d5896f25a4..50f0f3c4e6 100644 --- a/lib/iris/io/__init__.py +++ b/lib/iris/io/__init__.py @@ -153,11 +153,11 @@ def expand_filespecs(file_specs, files_expected=True): Returns ------- list of str - if files_expected is ``True``: + If files_expected is ``True``: A well-ordered list of matching absolute file paths. If any of the file-specs match no existing files, an exception is raised. - if files_expected is ``False``: + If files_expected is ``False``: A list of expanded file paths. """ # Remove any hostname component - currently unused @@ -334,7 +334,8 @@ def add_saver(file_extension, new_saver): See Also -------- - :func:`iris.io.save` + iris.io.save : + Save one or more Cubes to file (or other writeable). """ # Make sure it's a func with 2+ args diff --git a/lib/iris/io/format_picker.py b/lib/iris/io/format_picker.py index d5e54d231a..b1b93707c9 100644 --- a/lib/iris/io/format_picker.py +++ b/lib/iris/io/format_picker.py @@ -175,21 +175,22 @@ def __init__( Parameters ---------- format_name : str - string name of fileformat being described + String name of fileformat being described. 
file_element : FileElement instance of the element which identifies this - FormatSpecification + FormatSpecification. file_element_value : The value that the file_element should take if a file matches this - FormatSpecification + FormatSpecification. handler : optional - function which will be called when the specification has been + Function which will be called when the specification has been identified and is required to handler a format. If None, then the file can still be identified but no handling can be done. - priority: int + priority : int Integer giving a priority for considering this specification where higher priority means sooner consideration. - constraint_aware_handler: default=False + constraint_aware_handler : default=False + """ if not isinstance(file_element, FileElement): raise ValueError( diff --git a/lib/iris/palette.py b/lib/iris/palette.py index e180b649a8..522a89fa1b 100644 --- a/lib/iris/palette.py +++ b/lib/iris/palette.py @@ -55,7 +55,7 @@ def is_brewer(cmap): def _default_cmap_norm(args, kwargs): - """Injects default cmap and norm behaviour into the keyword arguments. + """Inject default cmap and norm behaviour into the keyword arguments. This function injects default cmap and norm behaviour into the keyword arguments, based on the cube referenced within the positional arguments. @@ -123,7 +123,7 @@ def cmap_norm(cube): ------- tuple Tuple of :class:`matplotlib.colors.LinearSegmentedColormap` and - :class:`iris.palette.SymmetricNormalize` + :class:`iris.palette.SymmetricNormalize`. Notes ----- diff --git a/lib/iris/pandas.py b/lib/iris/pandas.py index 1e79e1b31e..2efad54613 100644 --- a/lib/iris/pandas.py +++ b/lib/iris/pandas.py @@ -124,16 +124,10 @@ def as_cube( ): """Convert a Pandas Series/DataFrame into a 1D/2D Iris Cube. - .. deprecated:: 3.3.0 - - This function is scheduled for removal in a future release, being - replaced by :func:`iris.pandas.as_cubes`, which offers richer - dimensional intelligence. - Parameters ---------- pandas_array : :class:`pandas.Series` or :class:`pandas.DataFrame` - The Pandas object to convert + The Pandas object to convert. copy : bool, default=True Whether to copy `pandas_array`, or to create array views where possible. Provided in case of memory limit concerns. @@ -145,13 +139,21 @@ def as_cube( ----- This function will copy your data by default. - Example usage:: + Examples + -------- + :: as_cube(series, calendars={0: cf_units.CALENDAR_360_DAY}) as_cube(data_frame, calendars={1: cf_units.CALENDAR_STANDARD}) Since this function converts to/from a Pandas object, laziness will not be preserved. + .. deprecated:: 3.3.0 + + This function is scheduled for removal in a future release, being + replaced by :func:`iris.pandas.as_cubes`, which offers richer + dimensional intelligence. + """ message = ( "iris.pandas.as_cube has been deprecated, and will be removed in a " @@ -202,7 +204,7 @@ def as_cubes( Parameters ---------- pandas_structure : :class:`pandas.Series` or :class:`pandas.DataFrame` - The Pandas object to convert + The Pandas object to convert. copy : bool, default=True Whether the Cube :attr:`~iris.cube.Cube.data` is a copy of the `pandas_structure` column, or a view of the same array. Arrays other than @@ -564,14 +566,9 @@ def _make_cell_measures_list(cube): def as_series(cube, copy=True): """Convert a 1D cube to a Pandas Series. - .. deprecated:: 3.4.0 - This function is scheduled for removal in a future release, being - replaced by :func:`iris.pandas.as_data_frame`, which offers improved - multi dimension handling. 
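The ``as_cubes`` replacement pointed to by the ``as_cube`` deprecation note above, in its simplest form; the data values are illustrative only::

    import pandas as pd
    import iris.pandas

    series = pd.Series([281.2, 282.4, 280.9], name="air_temperature")
    # as_cubes always returns a CubeList, even for a single Series/column.
    (cube,) = iris.pandas.as_cubes(series)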
- Parameters ---------- - cube: :class:`Cube` + cube : :class:`Cube` The cube to convert to a Pandas Series. copy : bool, default=True Whether to make a copy of the data. @@ -585,6 +582,12 @@ def as_series(cube, copy=True): Since this function converts to/from a Pandas object, laziness will not be preserved. + .. deprecated:: 3.4.0 + + This function is scheduled for removal in a future release, being + replaced by :func:`iris.pandas.as_data_frame`, which offers improved + multi dimension handling. + """ message = ( "iris.pandas.as_series has been deprecated, and will be removed in a " @@ -644,7 +647,7 @@ def as_data_frame( ------- :class:`~pandas.DataFrame` A :class:`~pandas.DataFrame` with :class:`~iris.cube.Cube` dimensions - forming a :class:`~pandas.MultiIndex` + forming a :class:`~pandas.MultiIndex`. Warnings -------- @@ -664,8 +667,6 @@ def as_data_frame( :class:`~pandas.DataFrame` column (the legacy behaviour preserves 2 dimensions via rows and columns). - | - #. Where the :class:`~iris.cube.Cube` contains masked values, these become :data:`numpy.nan` in the returned :class:`~pandas.DataFrame`. @@ -680,6 +681,8 @@ def as_data_frame( :class:`~iris.cube.Cube` data `dtype` is preserved. + Since this function converts to/from a Pandas object, laziness will not be preserved. + Examples -------- >>> import iris @@ -794,10 +797,6 @@ def as_data_frame( 419903 298.995148 Name: surface_temperature, Length: 419904, dtype: float32 - Notes - ----- - Since this function converts to/from a Pandas object, laziness will not be preserved. - """ def merge_metadata(meta_var_list): diff --git a/lib/iris/plot.py b/lib/iris/plot.py index e9f73bd86b..a0c5f55274 100644 --- a/lib/iris/plot.py +++ b/lib/iris/plot.py @@ -256,7 +256,7 @@ def ticker_func(tick_location, _): def _invert_yaxis(v_coord, axes=None): - """Inverts the y-axis of the current plot based on conditions. + """Invert the y-axis of the current plot based on conditions. * If the y-axis is already inverted we don't want to re-invert it. * If v_coord is None then it will not have any attributes. @@ -266,7 +266,7 @@ def _invert_yaxis(v_coord, axes=None): Parameters ---------- v_coord : - The coord to be plotted on the y-axis + The coord to be plotted on the y-axis. axes : optional """ @@ -294,9 +294,9 @@ def _check_bounds_contiguity_and_mask(coord, data, atol=None, rtol=None): Parameters ---------- coord : iris.coord.Coord - Coordinate the bounds of which will be checked for contiguity + Coordinate the bounds of which will be checked for contiguity. data : array - Data of the the cube we are plotting + Data of the the cube we are plotting. atol : optional Absolute tolerance when checking the contiguity. Defaults to None. If an absolute tolerance is not set, 1D coords are not checked (so @@ -706,7 +706,7 @@ def _get_geodesic_params(globe): def _shift_plot_sections(u_object, u, v): - """Shifts subsections of u by multiples of 360 degrees. + """Shift subsections of u by multiples of 360 degrees. Shifts subsections of u by multiples of 360 degrees within ranges defined by the points where the line should cross over the 0/360 degree @@ -1052,7 +1052,7 @@ def _map_common(draw_method_name, arg_func, mode, cube, plot_defn, *args, **kwar def contour(cube, *args, **kwargs): - """Draws contour lines based on the given Cube. + """Draw contour lines based on the given Cube. Parameters ---------- @@ -1080,7 +1080,7 @@ def contour(cube, *args, **kwargs): def contourf(cube, *args, **kwargs): - """Draws filled contours based on the given Cube. 
+ """Draw filled contours based on the given Cube. Parameters ---------- @@ -1298,7 +1298,7 @@ def horiz_plot(v_coord, orography, style_args): def outline(cube, coords=None, color="k", linewidth=None, axes=None): - """Draws cell outlines based on the given Cube. + """Draw cell outlines based on the given Cube. Parameters ---------- @@ -1339,7 +1339,7 @@ def outline(cube, coords=None, color="k", linewidth=None, axes=None): def pcolor(cube, *args, **kwargs): - """Draws a pseudocolor plot based on the given 2-dimensional Cube. + """Draw a pseudocolor plot based on the given 2-dimensional Cube. The cube must have either two 1-dimensional coordinates or two 2-dimensional coordinates with contiguous bounds to plot the cube against. @@ -1375,7 +1375,7 @@ def pcolor(cube, *args, **kwargs): def pcolormesh(cube, *args, **kwargs): - """Draws a pseudocolor plot based on the given 2-dimensional Cube. + """Draw a pseudocolor plot based on the given 2-dimensional Cube. The cube must have either two 1-dimensional coordinates or two 2-dimensional coordinates with contiguous bounds to plot against each @@ -1409,7 +1409,7 @@ def pcolormesh(cube, *args, **kwargs): def points(cube, *args, **kwargs): - """Draws sample point positions based on the given Cube. + """Draw sample point positions based on the given Cube. Parameters ---------- @@ -1479,7 +1479,7 @@ def _vector_component_args(x_points, y_points, u_data, *args, **kwargs): return ((x_points, y_points, u_data, v_data), kwargs) -def barbs(u_cube, v_cube, *args, **kwargs): +def barbs(u_cube, v_cube, *args, **kwargs): # numpydoc ignore=PR08 """Draw a barb plot from two vector component cubes. Draws a barb plot from two vector component cubes. Triangles, full-lines @@ -1531,8 +1531,8 @@ def barbs(u_cube, v_cube, *args, **kwargs): ) -def quiver(u_cube, v_cube, *args, **kwargs): - """Draws an arrow plot from two vector component cubes. +def quiver(u_cube, v_cube, *args, **kwargs): # numpydoc ignore=PR08 + """Draw an arrow plot from two vector component cubes. Parameters ---------- @@ -1582,7 +1582,7 @@ def quiver(u_cube, v_cube, *args, **kwargs): def plot(*args, **kwargs): - """Draws a line plot based on the given cube(s) or coordinate(s). + """Draw a line plot based on the given cube(s) or coordinate(s). Parameters ---------- @@ -1635,7 +1635,7 @@ def plot(*args, **kwargs): def scatter(x, y, *args, **kwargs): - """Draws a scatter plot based on the given cube(s) or coordinate(s). + """Draw a scatter plot based on the given cube(s) or coordinate(s). Parameters ---------- @@ -1740,7 +1740,7 @@ def hist(x, *args, **kwargs): def symbols(x, y, symbols, size, axes=None, units="inches"): - """Draws fixed-size symbols. + """Draw fixed-size symbols. See :mod:`iris.symbols` for available symbols. @@ -1830,7 +1830,7 @@ def citation(text, figure=None, axes=None): def animate(cube_iterator, plot_func, fig=None, **kwargs): - """Animates the given cube iterator. + """Animate the given cube iterator. Parameters ---------- @@ -1846,15 +1846,15 @@ def animate(cube_iterator, plot_func, fig=None, **kwargs): fig : :class:`matplotlib.figure.Figure` instance, optional By default, the current figure will be used or a new figure instance created if no figure is available. See :func:`matplotlib.pyplot.gcf`. - coords: list of :class:`~iris.coords.Coord` objects or coordinate names, optional + coords : list of :class:`~iris.coords.Coord` objects or coordinate names, optional Use the given coordinates as the axes for the plot. 
The order of the given coordinates indicates which axis to use for each, where the first element is the horizontal axis of the plot and the second element is the vertical axis of the plot. - interval: int, float or long, optional + interval : int, float or long, optional Defines the time interval in milliseconds between successive frames. A default interval of 100ms is set. - vmin, vmax: int, float or long, optional + vmin, vmax : int, float or long, optional Color scaling values, see :class:`matplotlib.colors.Normalize` for further details. Default values are determined by the min-max across the data set over the entire sequence. diff --git a/lib/iris/quickplot.py b/lib/iris/quickplot.py index 14380019f3..b7d6e53f84 100644 --- a/lib/iris/quickplot.py +++ b/lib/iris/quickplot.py @@ -135,7 +135,7 @@ def _label_1d_plot(*args, **kwargs): def contour(cube, *args, **kwargs): - """Draws contour lines on a labelled plot based on the given Cube. + """Draw contour lines on a labelled plot based on the given Cube. With the basic call signature, contour "level" values are chosen automatically:: @@ -166,7 +166,7 @@ def contour(cube, *args, **kwargs): def contourf(cube, *args, **kwargs): - """Draws filled contours on a labelled plot based on the given Cube. + """Draw filled contours on a labelled plot based on the given Cube. With the basic call signature, contour "level" values are chosen automatically:: @@ -196,7 +196,7 @@ def contourf(cube, *args, **kwargs): def outline(cube, coords=None, color="k", linewidth=None, axes=None): - """Draws cell outlines on a labelled plot based on the given Cube. + """Draw cell outlines on a labelled plot based on the given Cube. Parameters ---------- @@ -227,7 +227,7 @@ def outline(cube, coords=None, color="k", linewidth=None, axes=None): def pcolor(cube, *args, **kwargs): - """Draws a labelled pseudocolor plot based on the given Cube. + """Draw a labelled pseudocolor plot based on the given Cube. See :func:`iris.plot.pcolor` for details of valid keyword arguments. @@ -244,7 +244,7 @@ def pcolor(cube, *args, **kwargs): def pcolormesh(cube, *args, **kwargs): - """Draws a labelled pseudocolour plot based on the given Cube. + """Draw a labelled pseudocolour plot based on the given Cube. See :func:`iris.plot.pcolormesh` for details of valid keyword arguments. @@ -262,7 +262,7 @@ def pcolormesh(cube, *args, **kwargs): def points(cube, *args, **kwargs): - """Draws sample point positions on a labelled plot based on the given Cube. + """Draw sample point positions on a labelled plot based on the given Cube. See :func:`iris.plot.points` for details of valid keyword arguments. @@ -280,7 +280,7 @@ def points(cube, *args, **kwargs): def plot(*args, **kwargs): - """Draws a labelled line plot based on the given cube(s) or coordinate(s). + """Draw a labelled line plot based on the given cube(s) or coordinate(s). See :func:`iris.plot.plot` for details of valid arguments and keyword arguments. @@ -298,7 +298,7 @@ def plot(*args, **kwargs): def scatter(x, y, *args, **kwargs): - """Draws a labelled scatter plot based on the given cubes or coordinates. + """Draw a labelled scatter plot based on the given cubes or coordinates. See :func:`iris.plot.scatter` for details of valid arguments and keyword arguments. @@ -316,7 +316,7 @@ def scatter(x, y, *args, **kwargs): def fill_between(x, y1, y2, *args, **kwargs): - """Draws a labelled fill_between plot based on the given cubes or coordinates. + """Draw a labelled fill_between plot based on the given cubes or coordinates. 
See :func:`iris.plot.fill_between` for details of valid arguments and keyword arguments. diff --git a/lib/iris/util.py b/lib/iris/util.py index 020b67783a..b98db8090f 100644 --- a/lib/iris/util.py +++ b/lib/iris/util.py @@ -191,7 +191,8 @@ def describe_diff(cube_a, cube_b, output_file=None): See Also -------- - :meth:`iris.cube.Cube.is_compatible()` + iris.cube.Cube.is_compatible : + Check if a Cube is compatible with another. """ if output_file is None: @@ -287,13 +288,13 @@ def rolling_window(a, window=1, step=1, axis=-1): Parameters ---------- a : array_like - Array to add rolling window to + Array to add rolling window to. window : int, default=1 - Size of rolling window + Size of rolling window. step : int, default=1 - Size of step between rolling windows + Size of step between rolling windows. axis : int, default=-1 - Axis to take the rolling window over + Axis to take the rolling window over. Returns ------- @@ -359,7 +360,7 @@ def array_equal(array1, array2, withnans=False): Parameters ---------- array1, array2 : arraylike - args to be compared, normalised if necessary with :func:`np.asarray`. + Args to be compared, normalised if necessary with :func:`np.asarray`. withnans : bool, default=False When unset (default), the result is False if either input contains NaN points. This is the normal floating-point arithmetic result. @@ -400,6 +401,11 @@ def approx_equal(a, b, max_absolute_error=1e-10, max_relative_error=1e-10): Returns whether two numbers are almost equal, allowing for the finite precision of floating point numbers. + Notes + ----- + This function does maintain laziness when called; it doesn't realise data. + See more at :doc:`/userguide/real_and_lazy_data`. + .. deprecated:: 3.2.0 Instead use :func:`math.isclose`. For example, rather than calling @@ -408,11 +414,6 @@ def approx_equal(a, b, max_absolute_error=1e-10, max_relative_error=1e-10): if the actual error equals the maximum, whereas :func:`util.approx_equal` will return False. - Notes - ----- - This function does maintain laziness when called; it doesn't realise data. - See more at :doc:`/userguide/real_and_lazy_data`. - """ wmsg = ( "iris.util.approx_equal has been deprecated and will be removed, " @@ -438,9 +439,9 @@ def between(lh, rh, lh_inclusive=True, rh_inclusive=True): Parameters ---------- lh : - The left hand element of the inequality + The left hand element of the inequality. rh : - The right hand element of the inequality + The right hand element of the inequality. lh_inclusive : bool, default=True Affects the left hand comparison operator to use in the inequality. True for ``<=`` false for ``<``. Defaults to True. @@ -581,7 +582,7 @@ def monotonic(array, strict=False, return_direction=False): Parameters ---------- strict : bool, default=False - Flag to enable strict monotonic checking + Flag to enable strict monotonic checking. return_direction : bool, default=False Flag to change return behaviour to return (monotonic_status, direction). Direction will be 1 for positive @@ -590,11 +591,9 @@ def monotonic(array, strict=False, return_direction=False): Returns ------- - monotonic_status : bool - Whether the array was monotonic. - - If the return_direction flag was given then the returned value - will be: ``(monotonic_status, direction)`` + bool + Whether the array was monotonic. If the return_direction flag was given + then the returned value will be: ``(monotonic_status, direction)``. 
Notes ----- @@ -783,9 +782,9 @@ def _slice_data_with_keys(data, keys): Parameters ---------- data : array-like - array to index. + Array to index. keys : list - list of indexes, as received from a __getitem__ call. + List of indexes, as received from a __getitem__ call. Returns ------- @@ -801,7 +800,7 @@ def _slice_data_with_keys(data, keys): both 'real' (numpy) arrays and other array-likes index in the same way, instead of numpy arrays doing 'fancy indexing'. - .. Note:: + .. note:: Avoids copying the data, where possible. @@ -1022,7 +1021,7 @@ def clip_string(the_str, clip_length=70, rider="..."): Parameters ---------- the_str : str - The string to be clipped + The string to be clipped. clip_length : int, default=70 The length in characters that the input string should be clipped to. Defaults to a preconfigured value if not specified. @@ -1101,7 +1100,7 @@ def new_axis(src_cube, scalar_coord=None, expand_extras=()): # maybe not lazy ---------- src_cube : :class:`iris.cube.Cube` Source cube on which to generate a new axis. - scalar_coord : :class:`iris.coord.Coord` or 'string', optional + scalar_coord : :class:`iris.coord.Coord` or str, optional Scalar coordinate to promote to a dimension coordinate. expand_extras : iterable, optional Auxiliary coordinates, ancillary variables and cell measures which will @@ -1531,7 +1530,7 @@ def promote_aux_coord_to_dim_coord(cube, name_or_coord): Parameters ---------- cube : - An instance of :class:`iris.cube.Cube` + An instance of :class:`iris.cube.Cube`. name_or_coord : * \(a) An instance of :class:`iris.coords.AuxCoord` * \(b) the :attr:`standard_name`, :attr:`long_name`, or @@ -1643,7 +1642,7 @@ def promote_aux_coord_to_dim_coord(cube, name_or_coord): def demote_dim_coord_to_aux_coord(cube, name_or_coord): - r"""Demotes a dimension coordinate on the cube to an auxiliary coordinate. + r"""Demote a dimension coordinate on the cube to an auxiliary coordinate. The DimCoord is demoted to an auxiliary coordinate on the cube. The dimension of the cube that was associated with the DimCoord becomes @@ -1653,7 +1652,7 @@ def demote_dim_coord_to_aux_coord(cube, name_or_coord): Parameters ---------- cube : - An instance of :class:`iris.cube.Cube` + An instance of :class:`iris.cube.Cube`. name_or_coord : * \(a) An instance of :class:`iris.coords.DimCoord` * \(b) the :attr:`standard_name`, :attr:`long_name`, or @@ -1768,8 +1767,8 @@ def find_discontiguities(cube, rel_tol=1e-5, abs_tol=1e-8): Returns ------- - result : `numpy.ndarray` of bool - true/false map of which cells in the cube XY grid have + numpy.ndarray + Boolean True/False map of which cells in the cube XY grid have discontiguities in the coordinate points array. This can be used as the input array for @@ -1895,7 +1894,7 @@ def _mask_array(array, points_to_mask, in_place=False): @_lenient_client(services=SERVICES) def mask_cube(cube, points_to_mask, in_place=False, dim=None): - """Masks any cells in the cube's data array. + """Mask any cells in the cube's data array. Masks any cells in the cube's data array which correspond to cells marked ``True`` (or non zero) in ``points_to_mask``. ``points_to_mask`` may be @@ -2098,12 +2097,12 @@ def mask_cube_from_shapefile(cube, shape, minimum_weight=0.0, in_place=False): Parameters ---------- + cube : :class:`~iris.cube.Cube` object + The `Cube` object to be masked. Must be singular, rather than a `CubeList`. shape : Shapely.Geometry object A single `shape` of the area to remain unmasked on the `cube`. 
If it a line object of some kind then minimum_weight will be ignored, - because you cannot compare the area of a 1D line and 2D Cell - cube : :class:`~iris.cube.Cube` object - The `Cube` object to masked. Must be singular, rather than a `CubeList` + because you cannot compare the area of a 1D line and 2D Cell. minimum_weight : float , default=0.0 A number between 0-1 describing what % of a cube cell area must the shape overlap to include it. @@ -2114,12 +2113,12 @@ def mask_cube_from_shapefile(cube, shape, minimum_weight=0.0, in_place=False): Returns ------- iris.Cube - A masked version of the input cube, if in_place is False - + A masked version of the input cube, if in_place is False. See Also -------- :func:`~iris.util.mask_cube` + Mask any cells in the cube’s data array. Notes ----- @@ -2141,7 +2140,6 @@ def mask_cube_from_shapefile(cube, shape, minimum_weight=0.0, in_place=False): >>> shape = shapely.geometry.box(-100,30, -80,40) # box between 30N-40N 100W-80W >>> masked_cube = mask_cube_from_shapefile(cube, shape) - ... """ shapefile_mask = create_shapefile_mask(shape, cube, minimum_weight) masked_cube = mask_cube(cube, shapefile_mask, in_place=in_place) diff --git a/noxfile.py b/noxfile.py index a30b6ce784..81c8b02fef 100644 --- a/noxfile.py +++ b/noxfile.py @@ -79,7 +79,7 @@ def cache_venv(session: nox.sessions.Session) -> None: Parameters ---------- - session: object + session : object A `nox.sessions.Session` object. """ @@ -96,7 +96,7 @@ def cache_cartopy(session: nox.sessions.Session) -> None: Parameters ---------- - session: object + session : object A `nox.sessions.Session` object. """ @@ -117,7 +117,7 @@ def prepare_venv(session: nox.sessions.Session) -> None: Parameters ---------- - session: object + session : object A `nox.sessions.Session` object. Notes @@ -174,7 +174,7 @@ def tests(session: nox.sessions.Session): Parameters ---------- - session: object + session : object A `nox.sessions.Session` object. """ @@ -198,7 +198,7 @@ def doctest(session: nox.sessions.Session): Parameters ---------- - session: object + session : object A `nox.sessions.Session` object. """ @@ -225,7 +225,7 @@ def gallery(session: nox.sessions.Session): Parameters ---------- - session: object + session : object A `nox.sessions.Session` object. """ @@ -246,7 +246,7 @@ def linkcheck(session: nox.sessions.Session): Parameters ---------- - session: object + session : object A `nox.sessions.Session` object. """ @@ -272,7 +272,7 @@ def wheel(session: nox.sessions.Session): Parameters ---------- - session: object + session : object A `nox.sessions.Session` object. """ diff --git a/pyproject.toml b/pyproject.toml index 4325de0e0e..f55a77cfdf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -162,3 +162,50 @@ ignore = [ "lib/iris/_version.py", "lib/iris/std_names.py", ] + +[tool.numpydoc_validation] +checks = [ + "all", # Enable all numpydoc validation rules, apart from the following: + + # -> Docstring text (summary) should start in the line immediately + # after the opening quotes (not in the same line, or leaving a + # blank line in between) + "GL01", # Permit summary line on same line as docstring opening quotes. + + # -> Closing quotes should be placed in the line after the last text + # in the docstring (do not close the quotes in the same line as + # the text, or leave a blank line between the last text and the + # quotes) + "GL02", # Permit a blank line before docstring closing quotes. 
+ + # -> Double line break found; please use only one blank line to + # separate sections or paragraphs, and do not leave blank lines + # at the end of docstrings + "GL03", # Ignoring. + + # -> See Also section not found + "SA01", # Not all docstrings require a "See Also" section. + + # -> No extended summary found + "ES01", # Not all docstrings require an "Extended Summary" section. + + # -> No examples section found + "EX01", # Not all docstrings require an "Examples" section. + + # -> No Yields section found + "YD01", # Not all docstrings require a "Yields" section. + + # Temporary checks to ignore, will be reviewed at a later date. + "GL08", # The object does not have a docstring + "PR01", # Parameters ... not documented + "PR02", # Unknown parameters {....} + "PR04", # Parameter "...." has no type + "PR07", # Parameter "...." has no description + "RT01", # No Returns section found + "RT03", # Return value has no description +] +exclude = [ + '\.__eq__$', + '\.__ne__$', + '\.__repr__$', +]
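
For illustration only (not part of the change set above): under this [tool.numpydoc_validation] configuration, a docstring such as the sketch below should pass the checks that remain enabled. The summary may sit on the same line as the opening quotes because "GL01" is ignored, parameter entries use the "name : type" form with capitalised, full-stop-terminated descriptions, and the absence of "See Also", "Examples" and extended-summary sections is tolerated because "SA01", "EX01" and "ES01" are ignored. The trailing check is hedged: it assumes numpydoc's validate() helper is available and that iris is importable; the function `scale` and the chosen target path are examples only, not part of the Iris codebase.

    # Minimal sketch, assuming numpydoc and iris are installed.
    from numpydoc.validate import validate


    def scale(values, factor=1.0):
        """Scale the given values by a constant factor.

        Parameters
        ----------
        values : iterable of float
            Values to be scaled.
        factor : float, default=1.0
            Multiplier applied to every element of ``values``.

        Returns
        -------
        list of float
            The scaled values.
        """
        return [value * factor for value in values]


    # validate() returns a report dict whose "errors" list holds
    # (check_code, message) pairs; any code that also appears in the
    # ignore list above is one the project has chosen not to enforce.
    # The target here is an arbitrary example of a public Iris function.
    report = validate("iris.util.rolling_window")
    print(sorted(code for code, _ in report["errors"]))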