From 3c662ab3d49b1d68a4f9880ca69bd58bba6f4d62 Mon Sep 17 00:00:00 2001
From: Tremain Knight <2108488+tkknight@users.noreply.github.com>
Date: Mon, 13 Nov 2023 15:12:21 +0000
Subject: [PATCH 1/6] baseline
---
docs/src/conf.py | 3 +-
lib/iris/config.py | 51 +-
lib/iris/fileformats/netcdf/__init__.py | 3 +-
lib/iris/fileformats/netcdf/_dask_locks.py | 80 +--
.../fileformats/netcdf/_thread_safe_nc.py | 104 ++--
lib/iris/fileformats/netcdf/loader.py | 37 +-
lib/iris/fileformats/netcdf/saver.py | 588 +++++++++---------
lib/iris/io/__init__.py | 151 ++---
lib/iris/io/format_picker.py | 117 ++--
lib/iris/time.py | 53 +-
10 files changed, 620 insertions(+), 567 deletions(-)
diff --git a/docs/src/conf.py b/docs/src/conf.py
index 8be34c1989..6282a68ba4 100644
--- a/docs/src/conf.py
+++ b/docs/src/conf.py
@@ -16,7 +16,6 @@
#
# All configuration values have a default; values that are commented out
# serve to show the default.
-
# ----------------------------------------------------------------------------
import datetime
@@ -196,7 +195,7 @@ def _dotv(version):
todo_include_todos = True
# api generation configuration
-autodoc_member_order = "groupwise"
+autodoc_member_order = "bysource"
autodoc_default_flags = ["show-inheritance"]
# https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#confval-autodoc_typehints
diff --git a/lib/iris/config.py b/lib/iris/config.py
index 03d3d363a6..b0060a0974 100644
--- a/lib/iris/config.py
+++ b/lib/iris/config.py
@@ -28,6 +28,7 @@
The [optional] name of the logger to notify when first imported.
----------
+
"""
import configparser
@@ -43,41 +44,37 @@ def get_logger(
name, datefmt=None, fmt=None, level=None, propagate=None, handler=True
):
"""
+ Create a custom logger.
+
Create a :class:`logging.Logger` with a :class:`logging.StreamHandler`
and custom :class:`logging.Formatter`.
- Args:
-
- * name:
+ Parameters
+ ----------
+ name
The name of the logger. Typically this is the module filename that
owns the logger.
-
- Kwargs:
-
- * datefmt:
+ datefmt: optional
The date format string of the :class:`logging.Formatter`.
Defaults to ``%d-%m-%Y %H:%M:%S``.
-
- * fmt:
+ fmt: optional
The additional format string of the :class:`logging.Formatter`.
This is appended to the default format string
``%(asctime)s %(name)s %(levelname)s - %(message)s``.
-
- * level:
+ level: optional
The threshold level of the logger. Defaults to ``INFO``.
-
- * propagate:
+ propagate: optional
Sets the ``propagate`` attribute of the :class:`logging.Logger`,
which determines whether events logged to this logger will be
passed to the handlers of higher level loggers. Defaults to
``False``.
-
- * handler:
+ handler: optional
Create and attach a :class:`logging.StreamHandler` to the
logger. Defaults to ``True``.
- Returns:
- A :class:`logging.Logger`.
+ Returns
+ -------
+ :class:`logging.Logger`.
"""
if level is None:
@@ -119,6 +116,8 @@ def get_logger(
# Returns simple string options
def get_option(section, option, default=None):
"""
+ Return the option value for the given section.
+
Returns the option value for the given section, or the default value
if the section/option is not present.
@@ -132,6 +131,8 @@ def get_option(section, option, default=None):
# Returns directory path options
def get_dir_option(section, option, default=None):
"""
+ Return the directory path from the given option and section.
+
Returns the directory path from the given option and section, or
returns the given default value if the section/option is not present
or does not represent a valid directory.
@@ -197,20 +198,19 @@ def __init__(self, conventions_override=None):
"""
Set up NetCDF processing options for Iris.
- Currently accepted kwargs:
-
- * conventions_override (bool):
+ Parameters
+ ----------
+ conventions_override : bool, optional
Define whether the CF Conventions version (e.g. `CF-1.6`) set when
saving a cube to a NetCDF file should be defined by
- Iris (the default) or the cube being saved.
-
- If `False` (the default), specifies that Iris should set the
+ Iris (the default) or the cube being saved. If `False`
+ (the default), specifies that Iris should set the
CF Conventions version when saving cubes as NetCDF files.
If `True`, specifies that the cubes being saved to NetCDF should
set the CF Conventions version for the saved NetCDF files.
- Example usages:
-
+ Examples
+ --------
* Specify, for the lifetime of the session, that we want all cubes
written to NetCDF to define their own CF Conventions versions::
@@ -277,6 +277,7 @@ def _defaults_dict(self):
def context(self, **kwargs):
"""
Allow temporary modification of the options via a context manager.
+
Accepted kwargs are the same as can be supplied to the Option.
"""
diff --git a/lib/iris/fileformats/netcdf/__init__.py b/lib/iris/fileformats/netcdf/__init__.py
index b696b200ff..bf3074af0e 100644
--- a/lib/iris/fileformats/netcdf/__init__.py
+++ b/lib/iris/fileformats/netcdf/__init__.py
@@ -4,8 +4,7 @@
# See COPYING and COPYING.LESSER in the root of the repository for full
# licensing details.
"""
-Module to support the loading and saving of NetCDF files, also using the CF conventions
-for metadata interpretation.
+Support loading and saving NetCDF files using CF conventions for metadata interpretation.
See : `NetCDF User's Guide `_
and `netCDF4 python module `_.
diff --git a/lib/iris/fileformats/netcdf/_dask_locks.py b/lib/iris/fileformats/netcdf/_dask_locks.py
index 15ac117a8b..9f572d96cf 100644
--- a/lib/iris/fileformats/netcdf/_dask_locks.py
+++ b/lib/iris/fileformats/netcdf/_dask_locks.py
@@ -6,45 +6,49 @@
"""
Module containing code to create locks enabling dask workers to co-operate.
-This matter is complicated by needing different solutions for different dask scheduler
-types, i.e. local 'threads' scheduler, local 'processes' or distributed.
+This matter is complicated by needing different solutions for different dask
+scheduler types, i.e. local 'threads' scheduler, local 'processes' or
+distributed.
-In any case, an "iris.fileformats.netcdf.saver.Saver" object contains a netCDF4.Dataset
-targeting an output file, and creates a Saver.file_write_lock object to serialise
-write-accesses to the file from dask tasks : All dask-task file writes go via a
-"iris.fileformats.netcdf.saver.NetCDFWriteProxy" object, which also contains a link
-to the Saver.file_write_lock, and uses it to prevent workers from fouling each other.
+In any case, an "iris.fileformats.netcdf.saver.Saver" object contains a
+netCDF4.Dataset targeting an output file, and creates a Saver.file_write_lock
+object to serialise write-accesses to the file from dask tasks : All dask-task
+file writes go via a "iris.fileformats.netcdf.saver.NetCDFWriteProxy" object,
+which also contains a link to the Saver.file_write_lock, and uses it to prevent
+workers from fouling each other.
For each chunk written, the NetCDFWriteProxy acquires the common per-file lock;
-opens a Dataset on the file; performs a write to the relevant variable; closes the
-Dataset and then releases the lock. This process is obviously very similar to what the
-NetCDFDataProxy does for reading lazy chunks.
+opens a Dataset on the file; performs a write to the relevant variable; closes
+the Dataset and then releases the lock. This process is obviously very similar
+to what the NetCDFDataProxy does for reading lazy chunks.
-For a threaded scheduler, the Saver.lock is a simple threading.Lock(). The workers
-(threads) execute tasks which contain a NetCDFWriteProxy, as above. All of those
-contain the common lock, and this is simply **the same object** for all workers, since
-they share an address space.
+For a threaded scheduler, the Saver.lock is a simple threading.Lock(). The
+workers (threads) execute tasks which contain a NetCDFWriteProxy, as above.
+All of those contain the common lock, and this is simply **the same object**
+for all workers, since they share an address space.
For a distributed scheduler, the Saver.lock is a `distributed.Lock()` which is
identified with the output filepath. This is distributed to the workers by
-serialising the task function arguments, which will include the NetCDFWriteProxy.
-A worker behaves like a process, though it may execute on a remote machine. When a
-distributed.Lock is deserialised to reconstruct the worker task, this creates an object
-that communicates with the scheduler. These objects behave as a single common lock,
-as they all have the same string 'identity', so the scheduler implements inter-process
-communication so that they can mutually exclude each other.
+serialising the task function arguments, which will include the
+NetCDFWriteProxy. A worker behaves like a process, though it may execute on a
+remote machine. When a distributed.Lock is deserialised to reconstruct the
+worker task, this creates an object that communicates with the scheduler.
+These objects behave as a single common lock, as they all have the same string
+'identity', so the scheduler implements inter-process communication so that
+they can mutually exclude each other.
It is also *conceivable* that multiple processes could write to the same file in
-parallel, if the operating system supports it. However, this also requires that the
-libnetcdf C library is built with parallel access option, which is not common.
-With the "ordinary" libnetcdf build, a process which attempts to open for writing a file
-which is _already_ open for writing simply raises an access error.
-In any case, Iris netcdf saver will not support this mode of operation, at present.
+parallel, if the operating system supports it. However, this also requires
+that the libnetcdf C library is built with parallel access option, which is
+not common. With the "ordinary" libnetcdf build, a process which attempts to
+open for writing a file which is _already_ open for writing simply raises an
+access error. In any case, Iris netcdf saver will not support this mode of
+operation, at present.
We don't currently support a local "processes" type scheduler. If we did, the
-behaviour should be very similar to a distributed scheduler. It would need to use some
-other serialisable shared-lock solution in place of 'distributed.Lock', which requires
-a distributed scheduler to function.
+behaviour should be very similar to a distributed scheduler. It would need to
+use some other serialisable shared-lock solution in place of
+'distributed.Lock', which requires a distributed scheduler to function.
"""
import threading
@@ -56,7 +60,7 @@
# A dedicated error class, allowing filtering and testing of errors raised here.
-class DaskSchedulerTypeError(ValueError):
+class DaskSchedulerTypeError(ValueError): # noqa: D101
pass
@@ -83,11 +87,13 @@ def get_dask_array_scheduler_type():
Returns one of 'distributed', 'threads' or 'processes'.
The return value is a valid argument for dask.config.set(scheduler=).
- This cannot distinguish between distributed local and remote clusters -- both of
- those simply return 'distributed'.
+ This cannot distinguish between distributed local and remote clusters --
+ both of those simply return 'distributed'.
- NOTE: this takes account of how dask is *currently* configured. It will be wrong
- if the config changes before the compute actually occurs.
+ Notes
+ -----
+ This takes account of how dask is *currently* configured. It will
+ be wrong if the config changes before the compute actually occurs.
"""
if dask_scheduler_is_distributed():
@@ -115,8 +121,12 @@ def get_worker_lock(identity: str):
"""
Return a mutex Lock which can be shared by multiple Dask workers.
- The type of Lock generated depends on the dask scheduler type, which must therefore
- be set up before this is called.
+ The type of Lock generated depends on the dask scheduler type, which must
+ therefore be set up before this is called.
+
+ Parameters
+ ----------
+ identity: str
"""
scheduler_type = get_dask_array_scheduler_type()
diff --git a/lib/iris/fileformats/netcdf/_thread_safe_nc.py b/lib/iris/fileformats/netcdf/_thread_safe_nc.py
index 21c697acab..36e66a98f6 100644
--- a/lib/iris/fileformats/netcdf/_thread_safe_nc.py
+++ b/lib/iris/fileformats/netcdf/_thread_safe_nc.py
@@ -25,7 +25,10 @@
class _ThreadSafeWrapper(ABC):
"""
- Contains a netCDF4 class instance, ensuring wrapping all API calls within _GLOBAL_NETCDF4_LOCK.
+ Contains a netCDF4 class instance, ensuring wrapping all API calls.
+
+ Contains a netCDF4 class instance, ensuring wrapping all API calls within
+ _GLOBAL_NETCDF4_LOCK.
Designed to 'gate keep' all the instance's API calls, but allowing the
same API as if working directly with the instance itself.
@@ -118,7 +121,7 @@ class VariableWrapper(_ThreadSafeWrapper):
def setncattr(self, *args, **kwargs) -> None:
"""
- Calls netCDF4.Variable.setncattr within _GLOBAL_NETCDF4_LOCK.
+ Call netCDF4.Variable.setncattr within _GLOBAL_NETCDF4_LOCK.
Only defined explicitly in order to get some mocks to work.
"""
@@ -142,11 +145,12 @@ def dimensions(self) -> typing.List[str]:
def get_dims(self, *args, **kwargs) -> typing.Tuple[DimensionWrapper]:
"""
- Calls netCDF4.Variable.get_dims() within _GLOBAL_NETCDF4_LOCK, returning DimensionWrappers.
+ Call netCDF4.Variable.get_dims() within _GLOBAL_NETCDF4_LOCK.
- The original returned netCDF4.Dimensions are simply replaced with their
- respective DimensionWrappers, ensuring that downstream calls are
- also performed within _GLOBAL_NETCDF4_LOCK.
+ Call netCDF4.Variable.get_dims() within _GLOBAL_NETCDF4_LOCK,
+ returning DimensionWrappers. The original returned netCDF4.Dimensions
+ are simply replaced with their respective DimensionWrappers, ensuring
+ that downstream calls are also performed within _GLOBAL_NETCDF4_LOCK.
"""
with _GLOBAL_NETCDF4_LOCK:
dimensions_ = list(
@@ -172,11 +176,12 @@ class GroupWrapper(_ThreadSafeWrapper):
@property
def dimensions(self) -> typing.Dict[str, DimensionWrapper]:
"""
- Calls dimensions of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning DimensionWrappers.
+ Calls dimensions of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK.
- The original returned netCDF4.Dimensions are simply replaced with their
- respective DimensionWrappers, ensuring that downstream calls are
- also performed within _GLOBAL_NETCDF4_LOCK.
+ Calls dimensions of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK,
+ returning DimensionWrappers. The original returned netCDF4.Dimensions
+ are simply replaced with their respective DimensionWrappers, ensuring
+ that downstream calls are also performed within _GLOBAL_NETCDF4_LOCK.
"""
with _GLOBAL_NETCDF4_LOCK:
dimensions_ = self._contained_instance.dimensions
@@ -187,11 +192,13 @@ def dimensions(self) -> typing.Dict[str, DimensionWrapper]:
def createDimension(self, *args, **kwargs) -> DimensionWrapper:
"""
- Calls createDimension() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning DimensionWrapper.
+ Call createDimension() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK.
- The original returned netCDF4.Dimension is simply replaced with its
- respective DimensionWrapper, ensuring that downstream calls are
- also performed within _GLOBAL_NETCDF4_LOCK.
+ Call createDimension() from netCDF4.Group/Dataset within
+ _GLOBAL_NETCDF4_LOCK, returning DimensionWrapper. The original returned
+ netCDF4.Dimension is simply replaced with its respective
+ DimensionWrapper, ensuring that downstream calls are also performed
+ within _GLOBAL_NETCDF4_LOCK.
"""
with _GLOBAL_NETCDF4_LOCK:
new_dimension = self._contained_instance.createDimension(
@@ -205,11 +212,12 @@ def createDimension(self, *args, **kwargs) -> DimensionWrapper:
@property
def variables(self) -> typing.Dict[str, VariableWrapper]:
"""
- Calls variables of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning VariableWrappers.
+ Calls variables of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK.
- The original returned netCDF4.Variables are simply replaced with their
- respective VariableWrappers, ensuring that downstream calls are
- also performed within _GLOBAL_NETCDF4_LOCK.
+ Calls variables of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK,
+ returning VariableWrappers. The original returned netCDF4.Variables
+ are simply replaced with their respective VariableWrappers, ensuring
+ that downstream calls are also performed within _GLOBAL_NETCDF4_LOCK.
"""
with _GLOBAL_NETCDF4_LOCK:
variables_ = self._contained_instance.variables
@@ -219,11 +227,13 @@ def variables(self) -> typing.Dict[str, VariableWrapper]:
def createVariable(self, *args, **kwargs) -> VariableWrapper:
"""
- Calls createVariable() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning VariableWrapper.
+ Call createVariable() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK.
- The original returned netCDF4.Variable is simply replaced with its
- respective VariableWrapper, ensuring that downstream calls are
- also performed within _GLOBAL_NETCDF4_LOCK.
+ Call createVariable() from netCDF4.Group/Dataset within
+ _GLOBAL_NETCDF4_LOCK, returning VariableWrapper. The original
+ returned netCDF4.Variable is simply replaced with its respective
+ VariableWrapper, ensuring that downstream calls are also performed
+ within _GLOBAL_NETCDF4_LOCK.
"""
with _GLOBAL_NETCDF4_LOCK:
new_variable = self._contained_instance.createVariable(
@@ -235,7 +245,10 @@ def get_variables_by_attributes(
self, *args, **kwargs
) -> typing.List[VariableWrapper]:
"""
- Calls get_variables_by_attributes() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning VariableWrappers.
+ Call get_variables_by_attributes() from netCDF4.Group/Dataset.
+
+ Call get_variables_by_attributes() from netCDF4.Group/Dataset
+ within _GLOBAL_NETCDF4_LOCK, returning VariableWrappers.
The original returned netCDF4.Variables are simply replaced with their
respective VariableWrappers, ensuring that downstream calls are
@@ -255,7 +268,10 @@ def get_variables_by_attributes(
@property
def groups(self):
"""
- Calls groups of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning GroupWrappers.
+ Calls groups of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK.
+
+ Calls groups of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK,
+ returning GroupWrappers.
The original returned netCDF4.Groups are simply replaced with their
respective GroupWrappers, ensuring that downstream calls are
@@ -268,7 +284,10 @@ def groups(self):
@property
def parent(self):
"""
- Calls parent of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning a GroupWrapper.
+ Calls parent of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK.
+
+ Calls parent of netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK,
+ returning a GroupWrapper.
The original returned netCDF4.Group is simply replaced with its
respective GroupWrapper, ensuring that downstream calls are
@@ -280,11 +299,13 @@ def parent(self):
def createGroup(self, *args, **kwargs):
"""
- Calls createGroup() from netCDF4.Group/Dataset within _GLOBAL_NETCDF4_LOCK, returning GroupWrapper.
+ Call createGroup() from netCDF4.Group/Dataset.
- The original returned netCDF4.Group is simply replaced with its
- respective GroupWrapper, ensuring that downstream calls are
- also performed within _GLOBAL_NETCDF4_LOCK.
+ Call createGroup() from netCDF4.Group/Dataset within
+ _GLOBAL_NETCDF4_LOCK, returning GroupWrapper. The original returned
+ netCDF4.Group is simply replaced with its respective GroupWrapper,
+ ensuring that downstream calls are also performed within
+ _GLOBAL_NETCDF4_LOCK.
"""
with _GLOBAL_NETCDF4_LOCK:
new_group = self._contained_instance.createGroup(*args, **kwargs)
@@ -305,11 +326,12 @@ class DatasetWrapper(GroupWrapper):
@classmethod
def fromcdl(cls, *args, **kwargs):
"""
- Calls netCDF4.Dataset.fromcdl() within _GLOBAL_NETCDF4_LOCK, returning a DatasetWrapper.
+ Call netCDF4.Dataset.fromcdl() within _GLOBAL_NETCDF4_LOCK.
- The original returned netCDF4.Dataset is simply replaced with its
- respective DatasetWrapper, ensuring that downstream calls are
- also performed within _GLOBAL_NETCDF4_LOCK.
+ Call netCDF4.Dataset.fromcdl() within _GLOBAL_NETCDF4_LOCK,
+ returning a DatasetWrapper. The original returned netCDF4.Dataset is
+ simply replaced with its respective DatasetWrapper, ensuring that
+ downstream calls are also performed within _GLOBAL_NETCDF4_LOCK.
"""
with _GLOBAL_NETCDF4_LOCK:
instance = cls.CONTAINED_CLASS.fromcdl(*args, **kwargs)
@@ -330,12 +352,13 @@ def __init__(self, shape, dtype, path, variable_name, fill_value):
@property
def ndim(self):
+ # noqa: D102
return len(self.shape)
def __getitem__(self, keys):
# Using a DatasetWrapper causes problems with invalid ID's and the
- # netCDF4 library, presumably because __getitem__ gets called so many
- # times by Dask. Use _GLOBAL_NETCDF4_LOCK directly instead.
+ # netCDF4 library, presumably because __getitem__ gets called so many
+ # times by Dask. Use _GLOBAL_NETCDF4_LOCK directly instead.
with _GLOBAL_NETCDF4_LOCK:
dataset = netCDF4.Dataset(self.path)
try:
@@ -364,11 +387,14 @@ def __setstate__(self, state):
class NetCDFWriteProxy:
"""
- The "opposite" of a NetCDFDataProxy : An object mimicking the data access of a
- netCDF4.Variable, but where the data is to be ***written to***.
+ An object mimicking the data access of a netCDF4.Variable.
+
+ The "opposite" of a NetCDFDataProxy : An object mimicking the data access
+ of a netCDF4.Variable, but where the data is to be ***written to***.
- It encapsulates the netcdf file and variable which are actually to be written to.
- This opens the file each time, to enable writing the data chunk, then closes it.
+ It encapsulates the netcdf file and variable which are actually to be
+ written to. This opens the file each time, to enable writing the data
+ chunk, then closes it.
TODO: could be improved with a caching scheme, but this just about works.
"""
diff --git a/lib/iris/fileformats/netcdf/loader.py b/lib/iris/fileformats/netcdf/loader.py
index 29202af89e..ea127cc197 100644
--- a/lib/iris/fileformats/netcdf/loader.py
+++ b/lib/iris/fileformats/netcdf/loader.py
@@ -4,8 +4,7 @@
# See COPYING and COPYING.LESSER in the root of the repository for full
# licensing details.
"""
-Module to support the loading of Iris cubes from NetCDF files, also using the CF
-conventions for metadata interpretation.
+Support loading Iris cubes from NetCDF files using the CF conventions for metadata interpretation.
See : `NetCDF User's Guide `_
and `netCDF4 python module `_.
@@ -146,7 +145,6 @@ def _actions_activation_stats(engine, cf_name):
def _set_attributes(attributes, key, value):
"""Set attributes dictionary, converting unicode strings appropriately."""
-
if isinstance(value, str):
try:
attributes[str(key)] = str(value)
@@ -158,6 +156,8 @@ def _set_attributes(attributes, key, value):
def _add_unused_attributes(iris_object, cf_var):
"""
+ Populate the attributes of a cf element with the "unused" attributes.
+
Populate the attributes of a cf element with the "unused" attributes
from the associated CF-netCDF variable. That is, all those that aren't CF
reserved terms.
@@ -336,10 +336,7 @@ def fix_attributes_all_elements(role_name):
def _load_aux_factory(engine, cube):
- """
- Convert any CF-netCDF dimensionless coordinate to an AuxCoordFactory.
-
- """
+ """Convert any CF-netCDF dimensionless coordinate to an AuxCoordFactory."""
formula_type = engine.requires.get("formula_type")
if formula_type in [
"atmosphere_sigma_coordinate",
@@ -465,9 +462,10 @@ def _translate_constraints_to_var_callback(constraints):
"""
Translate load constraints into a simple data-var filter function, if possible.
- Returns:
- * function(cf_var:CFDataVariable): --> bool,
- or None.
+ Returns
+ -------
+ callable or None
+ A filter function ``(cf_var: CFDataVariable) -> bool``, or None.
For now, ONLY handles a single NameConstraint with no 'STASH' component.
@@ -507,25 +505,24 @@ def inner(cf_datavar):
def load_cubes(file_sources, callback=None, constraints=None):
"""
- Loads cubes from a list of NetCDF filenames/OPeNDAP URLs.
-
- Args:
+ Load cubes from a list of NetCDF filenames/OPeNDAP URLs.
- * file_sources (string/list):
+ Parameters
+ ----------
+ file_sources : str or list
One or more NetCDF filenames/OPeNDAP URLs to load from.
OR open datasets.
- Kwargs:
-
- * callback (callable function):
+ callback : function, optional
Function which can be passed on to :func:`iris.io.run_callback`.
- Returns:
- Generator of loaded NetCDF :class:`iris.cube.Cube`.
+ Returns
+ -------
+ Generator of loaded NetCDF :class:`iris.cube.Cube`.
"""
# TODO: rationalise UGRID/mesh handling once experimental.ugrid is folded
- # into standard behaviour.
+ # into standard behaviour.
# Deferred import to avoid circular imports.
from iris.experimental.ugrid.cf import CFUGridReader
from iris.experimental.ugrid.load import (
diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py
index 011f74892d..2472047bb2 100644
--- a/lib/iris/fileformats/netcdf/saver.py
+++ b/lib/iris/fileformats/netcdf/saver.py
@@ -4,6 +4,8 @@
# See COPYING and COPYING.LESSER in the root of the repository for full
# licensing details.
"""
+Module to support the saving of Iris cubes to a NetCDF file.
+
Module to support the saving of Iris cubes to a NetCDF file, also using the CF
conventions for metadata interpretation.
@@ -178,19 +180,19 @@ def __init__(self):
self._map = []
def append(self, name, coord):
- """
- Append the given name and coordinate pair to the mapping.
+ """Append the given name and coordinate pair to the mapping.
- Args:
-
- * name:
+ Parameters
+ ----------
+ name:
CF name of the associated coordinate.
- * coord:
+ coord:
The coordinate of the associated CF name.
- Returns:
- None.
+ Returns
+ -------
+ None.
"""
self._map.append(CFNameCoordMap._Map(name, coord))
@@ -198,26 +200,24 @@ def append(self, name, coord):
@property
def names(self):
"""Return all the CF names."""
-
return [pair.name for pair in self._map]
@property
def coords(self):
"""Return all the coordinates."""
-
return [pair.coord for pair in self._map]
def name(self, coord):
- """
- Return the CF name, given a coordinate, or None if not recognised.
+ """Return the CF name, given a coordinate, or None if not recognised.
- Args:
-
- * coord:
+ Parameters
+ ----------
+ coord:
The coordinate of the associated CF name.
- Returns:
- Coordinate or None.
+ Returns
+ -------
+ Coordinate or None.
"""
result = None
@@ -228,17 +228,16 @@ def name(self, coord):
return result
def coord(self, name):
- """
- Return the coordinate, given a CF name, or None if not recognised.
+ """Return the coordinate, given a CF name, or None if not recognised.
- Args:
-
- * name:
+ Parameters
+ ----------
+ name:
CF name of the associated coordinate, or None if not recognised.
- Returns:
- CF name or None.
-
+ Returns
+ -------
+ CF name or None.
"""
result = None
for pair in self._map:
@@ -250,6 +249,8 @@ def coord(self, name):
def _bytes_if_ascii(string):
"""
+ Convert string to a byte string (str in py2k, bytes in py3k).
+
Convert the given string to a byte string (str in py2k, bytes in py3k)
if the given string can be encoded to ascii, else maintain the type
of the inputted string.
@@ -268,6 +269,8 @@ def _bytes_if_ascii(string):
def _setncattr(variable, name, attribute):
"""
+ Put the given attribute on the given netCDF4 Data type.
+
Put the given attribute on the given netCDF4 Data type, casting
attributes as we go to bytes rather than unicode.
@@ -321,9 +324,7 @@ def _data_fillvalue_check(arraylib, data, check_value):
class SaverFillValueWarning(iris.exceptions.IrisSaverFillValueWarning):
- """
- Backwards compatible form of :class:`iris.exceptions.IrisSaverFillValueWarning`.
- """
+ """Backwards compatible form of :class:`iris.exceptions.IrisSaverFillValueWarning`."""
# TODO: remove at the next major release.
pass
@@ -331,6 +332,8 @@ class SaverFillValueWarning(iris.exceptions.IrisSaverFillValueWarning):
def _fillvalue_report(fill_info, is_masked, contains_fill_value, warn=False):
"""
+ Work out whether there was a possible or actual fill-value collision.
+
From the given information, work out whether there was a possible or actual
fill-value collision, and if so construct a warning.
@@ -342,7 +345,7 @@ def _fillvalue_report(fill_info, is_masked, contains_fill_value, warn=False):
whether the data array was masked
contains_fill_value : bool
whether the data array contained the fill-value
- warn : bool
+ warn : bool, optional
if True, also issue any resulting warning immediately.
Returns
@@ -388,7 +391,7 @@ class Saver:
def __init__(self, filename, netcdf_format, compute=True):
"""
- A manager for saving netcdf files.
+ Manage saving netcdf files.
Parameters
----------
@@ -547,60 +550,48 @@ def write(
fill_value=None,
):
"""
- Wrapper for saving cubes to a NetCDF file.
+ Write a cube to the NetCDF file.
- Args:
-
- * cube (:class:`iris.cube.Cube`):
+ Parameters
+ ----------
+ cube : :class:`iris.cube.Cube`
A :class:`iris.cube.Cube` to be saved to a netCDF file.
-
- Kwargs:
-
- * local_keys (iterable of strings):
+ local_keys : iterable of str
An interable of cube attribute keys. Any cube attributes with
matching keys will become attributes on the data variable rather
than global attributes.
-
- * unlimited_dimensions (iterable of strings and/or
- :class:`iris.coords.Coord` objects):
+ unlimited_dimensions : iterable of str and/or :class:`iris.coords.Coord`
List of coordinate names (or coordinate objects)
corresponding to coordinate dimensions of `cube` to save with the
NetCDF dimension variable length 'UNLIMITED'. By default, no
unlimited dimensions are saved. Only the 'NETCDF4' format
supports multiple 'UNLIMITED' dimensions.
-
- * zlib (bool):
+ zlib : bool
If `True`, the data will be compressed in the netCDF file using
gzip compression (default `False`).
-
- * complevel (int):
+ complevel : int
An integer between 1 and 9 describing the level of compression
desired (default 4). Ignored if `zlib=False`.
-
- * shuffle (bool):
+ shuffle : bool
If `True`, the HDF5 shuffle filter will be applied before
compressing the data (default `True`). This significantly improves
compression. Ignored if `zlib=False`.
-
- * fletcher32 (bool):
+ fletcher32 : bool
If `True`, the Fletcher32 HDF5 checksum algorithm is activated to
detect errors. Default `False`.
-
- * contiguous (bool):
+ contiguous : bool
If `True`, the variable data is stored contiguously on disk.
Default `False`. Setting to `True` for a variable with an unlimited
dimension will trigger an error.
-
- * chunksizes (tuple of int):
+ chunksizes : tuple of int
Used to manually specify the HDF5 chunksizes for each dimension of
the variable. A detailed discussion of HDF chunking and I/O
- performance is available here:
- https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/netcdf_perf_chunking.html.
+ performance is available
+ `here `__.
Basically, you want the chunk size for each dimension to match
as closely as possible the size of the data block that users will
read from the file. `chunksizes` cannot be set if `contiguous=True`.
-
- * endian (string):
+ endian : str
Used to control whether the data is stored in little or big endian
format on disk. Possible values are 'little', 'big' or 'native'
(default). The library will automatically handle endian conversions
@@ -608,8 +599,7 @@ def write(
on a computer with the opposite format as the one used to create
the file, there may be some performance advantage to be gained by
setting the endian-ness.
-
- * least_significant_digit (int):
+ least_significant_digit : int
If `least_significant_digit` is specified, variable data will be
truncated (quantized). In conjunction with `zlib=True` this
produces 'lossy', but significantly more efficient compression. For
@@ -617,17 +607,16 @@ def write(
using `numpy.around(scale*data)/scale`, where `scale = 2**bits`,
and `bits` is determined so that a precision of 0.1 is retained (in
this case `bits=4`). From
- http://www.esrl.noaa.gov/psd/data/gridded/conventions/cdc_netcdf_standard.shtml:
+            `here <http://www.esrl.noaa.gov/psd/data/gridded/conventions/cdc_netcdf_standard.shtml>`__:
"least_significant_digit -- power of ten of the smallest decimal
place in unpacked data that is a reliable value". Default is
`None`, or no quantization, or 'lossless' compression.
-
- * packing (type or string or dict or list): A numpy integer datatype
- (signed or unsigned) or a string that describes a numpy integer
- dtype(i.e. 'i2', 'short', 'u4') or a dict of packing parameters as
- described below. This provides support for netCDF data packing as
- described in
- https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/best_practices.html#bp_Packed-Data-Values
+ packing : type or str or dict or list
+ A numpy integer datatype (signed or unsigned) or a string that
+ describes a numpy integer dtype(i.e. 'i2', 'short', 'u4') or a
+ dict of packing parameters as described below. This provides
+ support for netCDF data packing as described
+            `here <https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/best_practices.html#bp_Packed-Data-Values>`__.
If this argument is a type (or type string), appropriate values of
scale_factor and add_offset will be automatically calculated based
on `cube.data` and possible masking. For more control, pass a dict
@@ -637,20 +626,20 @@ def write(
manually using a dict to avoid this. The default is `None`, in
which case the datatype is determined from the cube and no packing
will occur.
-
- * fill_value:
+ fill_value:
The value to use for the `_FillValue` attribute on the netCDF
variable. If `packing` is specified the value of `fill_value`
should be in the domain of the packed data.
- Returns:
- None.
-
- .. note::
+ Returns
+ -------
+ None.
- The `zlib`, `complevel`, `shuffle`, `fletcher32`, `contiguous`,
- `chunksizes` and `endian` keywords are silently ignored for netCDF
- 3 files that do not use HDF5.
+ Notes
+ -----
+ The `zlib`, `complevel`, `shuffle`, `fletcher32`, `contiguous`,
+ `chunksizes` and `endian` keywords are silently ignored for netCDF
+ 3 files that do not use HDF5.
"""
if unlimited_dimensions is None:
@@ -757,6 +746,8 @@ def write(
@staticmethod
def check_attribute_compliance(container, data_dtype):
+        """Check attribute compliance."""
+
def _coerce_value(val_attr, val_attr_value, data_dtype):
val_attr_tmp = np.array(val_attr_value, dtype=data_dtype)
if (val_attr_tmp != val_attr_value).any():
@@ -788,15 +779,15 @@ def _coerce_value(val_attr, val_attr_value, data_dtype):
container.attributes[val_attr] = new_val
def update_global_attributes(self, attributes=None, **kwargs):
- """
+ """Update the CF global attributes.
+
Update the CF global attributes based on the provided
iterable/dictionary and/or keyword arguments.
- Args:
-
- * attributes (dict or iterable of key, value pairs):
+ Parameters
+ ----------
+ attributes: dict or iterable of key, value pairs
CF global attributes to be updated.
-
"""
if attributes is not None:
# Handle sequence e.g. [('fruit', 'apple'), ...].
@@ -812,23 +803,18 @@ def update_global_attributes(self, attributes=None, **kwargs):
def _create_cf_dimensions(
self, cube, dimension_names, unlimited_dimensions=None
):
- """
- Create the CF-netCDF data dimensions.
+ """Create the CF-netCDF data dimensions.
- Args:
-
- * cube (:class:`iris.cube.Cube`):
+ Parameters
+ ----------
+ cube: :class:`iris.cube.Cube`
A :class:`iris.cube.Cube` in which to lookup coordinates.
-
- Kwargs:
-
- * unlimited_dimensions (iterable of strings and/or
- :class:`iris.coords.Coord` objects):
+        unlimited_dimensions: iterable of strings and/or :class:`iris.coords.Coord` objects
List of coordinates to make unlimited (None by default).
- Returns:
- None.
-
+ Returns
+ -------
+ None.
"""
unlimited_dim_names = []
if unlimited_dimensions is not None:
@@ -855,6 +841,8 @@ def _create_cf_dimensions(
def _add_mesh(self, cube_or_mesh):
"""
+ Add the cube's mesh, and all related variables to the dataset.
+
Add the cube's mesh, and all related variables to the dataset.
Includes all the mesh-element coordinate and connectivity variables.
@@ -863,17 +851,16 @@ def _add_mesh(self, cube_or_mesh):
Here, we do *not* add the relevant referencing attributes to the
data-variable, because we want to create the data-variable later.
- Args:
-
- * cube_or_mesh (:class:`iris.cube.Cube`
- or :class:`iris.experimental.ugrid.Mesh`):
+ Parameters
+ ----------
+        cube_or_mesh: :class:`iris.cube.Cube` or :class:`iris.experimental.ugrid.Mesh`
The Cube or Mesh being saved to the netCDF file.
- Returns:
- * cf_mesh_name (string or None):
+ Returns
+ -------
+ cf_mesh_name: string or None
The name of the mesh variable created, or None if the cube does not
have a mesh.
-
"""
cf_mesh_name = None
@@ -991,6 +978,8 @@ def _add_inner_related_vars(
self, cube, cf_var_cube, dimension_names, coordlike_elements
):
"""
+ Create a set of variables for aux-coords, ancillaries or cell-measures.
+
Create a set of variables for aux-coords, ancillaries or cell-measures,
and attach them to the parent data variable.
@@ -1035,17 +1024,16 @@ def _add_inner_related_vars(
def _add_aux_coords(self, cube, cf_var_cube, dimension_names):
"""
- Add aux. coordinate to the dataset and associate with the data variable
-
- Args:
+ Add aux. coordinate to the dataset and associate with the data variable.
- * cube (:class:`iris.cube.Cube`):
+ Parameters
+ ----------
+ cube: :class:`iris.cube.Cube`
A :class:`iris.cube.Cube` to be saved to a netCDF file.
- * cf_var_cube (:class:`netcdf.netcdf_variable`):
+ cf_var_cube: :class:`netcdf.netcdf_variable`
cf variable cube representation.
- * dimension_names (list):
+ dimension_names: list
Names associated with the dimensions of the cube.
-
"""
from iris.experimental.ugrid.mesh import (
Mesh,
@@ -1077,17 +1065,16 @@ def _add_aux_coords(self, cube, cf_var_cube, dimension_names):
def _add_cell_measures(self, cube, cf_var_cube, dimension_names):
"""
- Add cell measures to the dataset and associate with the data variable
-
- Args:
+ Add cell measures to the dataset and associate with the data variable.
- * cube (:class:`iris.cube.Cube`):
+ Parameters
+ ----------
+ cube: :class:`iris.cube.Cube`
A :class:`iris.cube.Cube` to be saved to a netCDF file.
- * cf_var_cube (:class:`netcdf.netcdf_variable`):
+ cf_var_cube: :class:`netcdf.netcdf_variable`
cf variable cube representation.
- * dimension_names (list):
+ dimension_names: list
Names associated with the dimensions of the cube.
-
"""
return self._add_inner_related_vars(
cube,
@@ -1098,18 +1085,16 @@ def _add_cell_measures(self, cube, cf_var_cube, dimension_names):
def _add_ancillary_variables(self, cube, cf_var_cube, dimension_names):
"""
- Add ancillary variables measures to the dataset and associate with the
- data variable
+        Add ancillary variables to the dataset and associate with the data variable.
- Args:
-
- * cube (:class:`iris.cube.Cube`):
+ Parameters
+ ----------
+ cube: :class:`iris.cube.Cube`
A :class:`iris.cube.Cube` to be saved to a netCDF file.
- * cf_var_cube (:class:`netcdf.netcdf_variable`):
+ cf_var_cube: :class:`netcdf.netcdf_variable`
cf variable cube representation.
- * dimension_names (list):
+ dimension_names: list
Names associated with the dimensions of the cube.
-
"""
return self._add_inner_related_vars(
cube,
@@ -1122,13 +1107,12 @@ def _add_dim_coords(self, cube, dimension_names):
"""
Add coordinate variables to NetCDF dataset.
- Args:
-
- * cube (:class:`iris.cube.Cube`):
+ Parameters
+ ----------
+ cube: :class:`iris.cube.Cube`
A :class:`iris.cube.Cube` to be saved to a netCDF file.
- * dimension_names (list):
+ dimension_names: list
Names associated with the dimensions of the cube.
-
"""
# Ensure we create the netCDF coordinate variables first.
for coord in cube.dim_coords:
@@ -1142,19 +1126,20 @@ def _add_dim_coords(self, cube, dimension_names):
def _add_aux_factories(self, cube, cf_var_cube, dimension_names):
"""
- Modifies the variables of the NetCDF dataset to represent
+ Represent the presence of dimensionless vertical coordinates.
+
+ Modify the variables of the NetCDF dataset to represent
the presence of dimensionless vertical coordinates based on
the aux factories of the cube (if any).
- Args:
-
- * cube (:class:`iris.cube.Cube`):
+ Parameters
+ ----------
+ cube: :class:`iris.cube.Cube`
A :class:`iris.cube.Cube` to be saved to a netCDF file.
- * cf_var_cube (:class:`netcdf.netcdf_variable`)
+ cf_var_cube: :class:`netcdf.netcdf_variable`
CF variable cube representation.
- * dimension_names (list):
+ dimension_names: list
Names associated with the dimensions of the cube.
-
"""
primaries = []
for factory in cube.aux_factories:
@@ -1240,23 +1225,23 @@ def _get_dim_names(self, cube_or_mesh):
"""
Determine suitable CF-netCDF data dimension names.
- Args:
-
- * cube_or_mesh (:class:`iris.cube.Cube`
- or :class:`iris.experimental.ugrid.Mesh`):
+ Parameters
+ ----------
+ cube_or_mesh: :class:`iris.cube.Cube` or :class:`iris.experimental.ugrid.Mesh`
The Cube or Mesh being saved to the netCDF file.
- Returns:
- mesh_dimensions, cube_dimensions
- * mesh_dimensions (list of string):
- A list of the mesh dimensions of the attached mesh, if any.
- * cube_dimensions (list of string):
- A lists of dimension names for each dimension of the cube
+ Returns
+ -------
+ mesh_dimensions: list of str
+ A list of the mesh dimensions of the attached mesh, if any.
+ cube_dimensions: list of str
+            A list of dimension names for each dimension of the cube
- ..note::
- The returned lists are in the preferred file creation order.
- One of the mesh dimensions will typically also appear in the cube
- dimensions.
+ Notes
+ -----
+ The returned lists are in the preferred file creation order.
+ One of the mesh dimensions will typically also appear in the cube
+ dimensions.
"""
@@ -1264,6 +1249,8 @@ def record_dimension(
names_list, dim_name, length, matching_coords=None
):
"""
+ Record a file dimension, its length and associated "coordinates".
+
Record a file dimension, its length and associated "coordinates"
(which may in fact also be connectivities).
@@ -1462,16 +1449,17 @@ def record_dimension(
@staticmethod
def cf_valid_var_name(var_name):
- """
- Return a valid CF var_name given a potentially invalid name.
-
- Args:
+ """Return a valid CF var_name given a potentially invalid name.
- * var_name (str):
+ Parameters
+ ----------
+ var_name: str
The var_name to normalise
- Returns:
- A var_name suitable for passing through for variable creation.
+ Returns
+ -------
+ str
+ var_name suitable for passing through for variable creation.
"""
# Replace invalid characters with an underscore ("_").
@@ -1486,17 +1474,17 @@ def _cf_coord_standardised_units(coord):
"""
Determine a suitable units from a given coordinate.
- Args:
-
- * coord (:class:`iris.coords.Coord`):
+ Parameters
+ ----------
+ coord: :class:`iris.coords.Coord`
A coordinate of a cube.
- Returns:
+ Returns
+ -------
+ units
The (standard_name, long_name, unit) of the given
:class:`iris.coords.Coord` instance.
-
"""
-
units = str(coord.units)
# Set the 'units' of 'latitude' and 'longitude' coordinates specified
# in 'degrees' to 'degrees_north' and 'degrees_east' respectively,
@@ -1548,17 +1536,18 @@ def _create_cf_bounds(self, coord, cf_var, cf_name):
"""
Create the associated CF-netCDF bounds variable.
- Args:
-
- * coord (:class:`iris.coords.Coord`):
+ Parameters
+ ----------
+ coord: :class:`iris.coords.Coord`
A coordinate of a cube.
- * cf_var:
+ cf_var:
CF-netCDF variable
- * cf_name (string):
+ cf_name: str
name of the CF-NetCDF variable.
- Returns:
- None
+ Returns
+ -------
+ None
"""
if hasattr(coord, "has_bounds") and coord.has_bounds():
@@ -1606,15 +1595,17 @@ def _create_cf_bounds(self, coord, cf_var, cf_name):
def _get_cube_variable_name(self, cube):
"""
- Returns a CF-netCDF variable name for the given cube.
-
- Args:
+ Return a CF-netCDF variable name for the given cube.
- * cube (class:`iris.cube.Cube`):
+ Parameters
+ ----------
+ cube: :class:`iris.cube.Cube`
An instance of a cube for which a CF-netCDF variable
name is required.
- Returns:
+ Returns
+ -------
+ str
A CF-netCDF variable name as a string.
"""
@@ -1629,18 +1620,19 @@ def _get_cube_variable_name(self, cube):
def _get_coord_variable_name(self, cube_or_mesh, coord):
"""
- Returns a CF-netCDF variable name for a given coordinate-like element.
+ Return a CF-netCDF variable name for a given coordinate-like element.
- Args:
-
- * cube_or_mesh (:class:`iris.cube.Cube`
- or :class:`iris.experimental.ugrid.Mesh`):
+ Parameters
+ ----------
+ cube_or_mesh: :class:`iris.cube.Cube` or :class:`iris.experimental.ugrid.Mesh`
The Cube or Mesh being saved to the netCDF file.
- * coord (:class:`iris.coords._DimensionalMetadata`):
+ coord: :class:`iris.coords._DimensionalMetadata`
An instance of a coordinate (or similar), for which a CF-netCDF
variable name is required.
- Returns:
+ Returns
+ -------
+ str
A CF-netCDF variable name as a string.
"""
@@ -1691,15 +1683,17 @@ def _get_coord_variable_name(self, cube_or_mesh, coord):
def _get_mesh_variable_name(self, mesh):
"""
- Returns a CF-netCDF variable name for the mesh.
-
- Args:
+ Return a CF-netCDF variable name for the mesh.
- * mesh (:class:`iris.experimental.ugrid.mesh.Mesh`):
+ Parameters
+ ----------
+ mesh: :class:`iris.experimental.ugrid.mesh.Mesh`
An instance of a Mesh for which a CF-netCDF variable name is
required.
- Returns:
+ Returns
+ -------
+ str
A CF-netCDF variable name as a string.
"""
@@ -1718,12 +1712,14 @@ def _create_mesh(self, mesh):
"""
Create a mesh variable in the netCDF dataset.
- Args:
-
- * mesh (:class:`iris.experimental.ugrid.mesh.Mesh`):
+ Parameters
+ ----------
+ mesh: :class:`iris.experimental.ugrid.mesh.Mesh`
The Mesh to be saved to CF-netCDF file.
- Returns:
+ Returns
+ -------
+ str
The string name of the associated CF-netCDF variable saved.
"""
@@ -1798,6 +1794,8 @@ def _create_generic_cf_array_var(
fill_value=None,
):
"""
+        Create the CF-netCDF variable given dimensional_metadata.
+
Create the associated CF-netCDF variable in the netCDF dataset for the
given dimensional_metadata.
@@ -1805,33 +1803,32 @@ def _create_generic_cf_array_var(
If the metadata element is a coord, it may also contain bounds.
In which case, an additional var is created and linked to it.
- Args:
-
- * cube_or_mesh (:class:`iris.cube.Cube`
- or :class:`iris.experimental.ugrid.Mesh`):
+ Parameters
+ ----------
+ cube_or_mesh: :class:`iris.cube.Cube` or :class:`iris.experimental.ugrid.Mesh`
The Cube or Mesh being saved to the netCDF file.
- * cube_dim_names (list of string):
+ cube_dim_names: list of str
The name of each dimension of the cube.
- * element:
+ element: :class:`iris.coords._DimensionalMetadata`
An Iris :class:`iris.coords._DimensionalMetadata`, belonging to the
cube. Provides data, units and standard/long/var names.
Not used if 'element_dims' is not None.
- * element_dims (list of string, or None):
+ element_dims : list of string, or None
If set, contains the variable dimension (names),
otherwise these are taken from `element.cube_dims[cube]`.
For Mesh components (element coordinates and connectivities), this
*must* be passed in, as "element.cube_dims" does not function.
- * fill_value (number or None):
+ fill_value: number or None
If set, create the variable with this fill-value, and fill any
masked data points with this value.
If not set, standard netcdf4-python behaviour : the variable has no
'_FillValue' property, and uses the "standard" fill-value for its
type.
- Returns:
- var_name (string):
- The name of the CF-netCDF variable created.
-
+ Returns
+ -------
+ str
+ The name of the CF-netCDF variable created.
"""
# Support cube or mesh save.
from iris.cube import Cube
@@ -1947,16 +1944,17 @@ def _create_cf_cell_methods(self, cube, dimension_names):
"""
Create CF-netCDF string representation of a cube cell methods.
- Args:
-
- * cube (:class:`iris.cube.Cube`) or cubelist
- (:class:`iris.cube.CubeList`):
+ Parameters
+ ----------
+ cube : :class:`iris.cube.Cube` or :class:`iris.cube.CubeList`
A :class:`iris.cube.Cube`, :class:`iris.cube.CubeList` or list of
cubes to be saved to a netCDF file.
- * dimension_names (list):
+ dimension_names : list
Names associated with the dimensions of the cube.
- Returns:
+ Returns
+ -------
+ str
CF-netCDF string representation of a cube cell methods.
"""
@@ -1996,20 +1994,22 @@ def _create_cf_cell_methods(self, cube, dimension_names):
def _create_cf_grid_mapping(self, cube, cf_var_cube):
"""
+ Create CF-netCDF grid mapping and associated CF-netCDF variable.
+
Create CF-netCDF grid mapping variable and associated CF-netCDF
data variable grid mapping attribute.
- Args:
-
- * cube (:class:`iris.cube.Cube`) or cubelist
- (:class:`iris.cube.CubeList`):
+ Parameters
+ ----------
+ cube: :class:`iris.cube.Cube` or :class:`iris.cube.CubeList`
A :class:`iris.cube.Cube`, :class:`iris.cube.CubeList` or list of
cubes to be saved to a netCDF file.
- * cf_var_cube (:class:`netcdf.netcdf_variable`):
+ cf_var_cube: :class:`netcdf.netcdf_variable`
cf variable cube representation.
- Returns:
- None
+ Returns
+ -------
+ None
"""
cs = cube.coord_system("CoordSystem")
@@ -2265,30 +2265,27 @@ def _create_cf_data_variable(
**kwargs,
):
"""
- Create CF-netCDF data variable for the cube and any associated grid
- mapping.
+ Create CF-netCDF data variable for the cube and any associated grid mapping.
- Args:
-
- * cube (:class:`iris.cube.Cube`):
+ Parameters
+ ----------
+ cube: :class:`iris.cube.Cube`
The associated cube being saved to CF-netCDF file.
- * dimension_names (list):
+ dimension_names: list
String names for each dimension of the cube.
-
- Kwargs:
-
- * local_keys (iterable of strings):
- * see :func:`iris.fileformats.netcdf.Saver.write`
- * packing (type or string or dict or list):
- * see :func:`iris.fileformats.netcdf.Saver.write`
- * fill_value:
- * see :func:`iris.fileformats.netcdf.Saver.write`
+ local_keys: iterable of str, optional
+ See :func:`iris.fileformats.netcdf.Saver.write`
+ packing: type or string or dict or list, optional
+ See :func:`iris.fileformats.netcdf.Saver.write`
+ fill_value: optional
+ See :func:`iris.fileformats.netcdf.Saver.write`
All other keywords are passed through to the dataset's `createVariable`
method.
- Returns:
- The newly created CF-netCDF data variable.
+ Returns
+ -------
+ The newly created CF-netCDF data variable.
"""
# Get the values in a form which is valid for the file format.
@@ -2433,13 +2430,14 @@ def _increment_name(self, varname):
Avoidance of conflicts between variable names, where the name is
incremented to distinguish it from others.
- Args:
-
- * varname (string):
+ Parameters
+ ----------
+ varname: str
Variable name to increment.
- Returns:
- Incremented varname.
+ Returns
+ -------
+ Incremented varname.
"""
num = 0
@@ -2546,18 +2544,19 @@ def store(data, cf_var, fill_info):
def delayed_completion(self) -> Delayed:
"""
- Create and return a :class:`dask.delayed.Delayed` to perform file completion
- for delayed saves.
+ Perform file completion for delayed saves.
- This contains all the delayed writes, which complete the file by filling out
- the data of variables initially created empty, and also the checks for
- potential fill-value collisions.
- When computed, it returns a list of any warnings which were generated in the
- save operation.
+ Create and return a :class:`dask.delayed.Delayed` to perform file
+ completion for delayed saves.
+
+ This contains all the delayed writes, which complete the file by
+ filling out the data of variables initially created empty, and also the
+ checks for potential fill-value collisions. When computed, it returns
+ a list of any warnings which were generated in the save operation.
Returns
-------
- completion : :class:`dask.delayed.Delayed`
+ :class:`dask.delayed.Delayed`
Notes
-----
@@ -2623,12 +2622,12 @@ def complete(self, issue_warnings=True) -> List[Warning]:
Parameters
----------
- issue_warnings : bool, default = True
+ issue_warnings: bool, default = True
If true, issue all the resulting warnings with :func:`warnings.warn`.
Returns
-------
- warnings : list of Warning
+ warnings: list of Warning
Any warnings that were raised while writing delayed data.
"""
@@ -2670,7 +2669,7 @@ def save(
fill_value=None,
compute=True,
):
- """
+ r"""
Save cube(s) to a netCDF file, given the cube and the filename.
* Iris will write CF 1.7 compliant NetCDF files.
@@ -2685,13 +2684,12 @@ def save(
status of the cube's data payload, unless the netcdf_format is explicitly
specified to be 'NETCDF3' or 'NETCDF3_CLASSIC'.
- Args:
-
- * cube (:class:`iris.cube.Cube` or :class:`iris.cube.CubeList`):
+ Parameters
+ ----------
+ cube: :class:`iris.cube.Cube` or :class:`iris.cube.CubeList`
A :class:`iris.cube.Cube`, :class:`iris.cube.CubeList` or other
iterable of cubes to be saved to a netCDF file.
-
- * filename (string):
+ filename: str
Name of the netCDF file to save the cube(s).
**Or** an open, writeable :class:`netCDF4.Dataset`, or compatible object.
@@ -2699,56 +2697,45 @@ def save(
When saving to a dataset, ``compute`` **must** be ``False`` :
See the ``compute`` parameter.
- Kwargs:
-
- * netcdf_format (string):
+ netcdf_format: string
Underlying netCDF file format, one of 'NETCDF4', 'NETCDF4_CLASSIC',
'NETCDF3_CLASSIC' or 'NETCDF3_64BIT'. Default is 'NETCDF4' format.
-
- * local_keys (iterable of strings):
+ local_keys: iterable of str, optional
An interable of cube attribute keys. Any cube attributes with
matching keys will become attributes on the data variable rather
than global attributes.
-
- * unlimited_dimensions (iterable of strings and/or
- :class:`iris.coords.Coord` objects):
+ unlimited_dimensions: iterable of str and/or :class:`iris.coords.Coord` objects, optional
List of coordinate names (or coordinate objects) corresponding
to coordinate dimensions of `cube` to save with the NetCDF dimension
variable length 'UNLIMITED'. By default, no unlimited dimensions are
saved. Only the 'NETCDF4' format supports multiple 'UNLIMITED'
dimensions.
-
- * zlib (bool):
+ zlib: bool, optional
If `True`, the data will be compressed in the netCDF file using gzip
compression (default `False`).
-
- * complevel (int):
+ complevel: int
An integer between 1 and 9 describing the level of compression desired
(default 4). Ignored if `zlib=False`.
-
- * shuffle (bool):
+ shuffle: bool, optional
If `True`, the HDF5 shuffle filter will be applied before compressing
the data (default `True`). This significantly improves compression.
Ignored if `zlib=False`.
-
- * fletcher32 (bool):
+ fletcher32: bool, optional
If `True`, the Fletcher32 HDF5 checksum algorithm is activated to
detect errors. Default `False`.
-
- * contiguous (bool):
+ contiguous: bool, optional
If `True`, the variable data is stored contiguously on disk. Default
`False`. Setting to `True` for a variable with an unlimited dimension
will trigger an error.
-
- * chunksizes (tuple of int):
+ chunksizes: tuple of int, optional
Used to manually specify the HDF5 chunksizes for each dimension of the
variable. A detailed discussion of HDF chunking and I/O performance is
- available here: https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/netcdf_perf_chunking.html.
+ available
+        `here <https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/netcdf_perf_chunking.html>`__.
Basically, you want the chunk size for each dimension to match as
closely as possible the size of the data block that users will read
from the file. `chunksizes` cannot be set if `contiguous=True`.
-
- * endian (string):
+ endian: str
Used to control whether the data is stored in little or big endian
format on disk. Possible values are 'little', 'big' or 'native'
(default). The library will automatically handle endian conversions
@@ -2756,8 +2743,7 @@ def save(
computer with the opposite format as the one used to create the file,
there may be some performance advantage to be gained by setting the
endian-ness.
-
- * least_significant_digit (int):
+ least_significant_digit: int, optional
If `least_significant_digit` is specified, variable data will be
truncated (quantized). In conjunction with `zlib=True` this produces
'lossy', but significantly more efficient compression. For example, if
@@ -2765,17 +2751,17 @@ def save(
`numpy.around(scale*data)/scale`, where `scale = 2**bits`, and `bits`
is determined so that a precision of 0.1 is retained (in this case
`bits=4`). From
- http://www.esrl.noaa.gov/psd/data/gridded/conventions/cdc_netcdf_standard.shtml:
+        `here <http://www.esrl.noaa.gov/psd/data/gridded/conventions/cdc_netcdf_standard.shtml>`__:
"least_significant_digit -- power of ten of the smallest decimal place
in unpacked data that is a reliable value". Default is `None`, or no
quantization, or 'lossless' compression.
-
- * packing (type or string or dict or list): A numpy integer datatype
- (signed or unsigned) or a string that describes a numpy integer dtype
- (i.e. 'i2', 'short', 'u4') or a dict of packing parameters as described
- below or an iterable of such types, strings, or dicts.
- This provides support for netCDF data packing as described in
- https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/best_practices.html#bp_Packed-Data-Values
+ packing: type or str or dict or list, optional
+ A numpy integer datatype (signed or unsigned) or a string that
+ describes a numpy integer dtype (i.e. 'i2', 'short', 'u4') or a dict
+ of packing parameters as described below or an iterable of such types,
+ strings, or dicts. This provides support for netCDF data packing as
+ described in
+        `here <https://www.unidata.ucar.edu/software/netcdf/documentation/NUG/best_practices.html#bp_Packed-Data-Values>`__.
If this argument is a type (or type string), appropriate values of
scale_factor and add_offset will be automatically calculated based
on `cube.data` and possible masking. For more control, pass a dict with
@@ -2785,18 +2771,16 @@ def save(
avoid this. The default is `None`, in which case the datatype is
determined from the cube and no packing will occur. If this argument is
a list it must have the same number of elements as `cube` if `cube` is
- a `:class:`iris.cube.CubeList`, or one element, and each element of
+ a :class:`iris.cube.CubeList`, or one element, and each element of
this argument will be applied to each cube separately.
-
- * fill_value (numeric or list):
+ fill_value: numeric or list, optional
The value to use for the `_FillValue` attribute on the netCDF variable.
If `packing` is specified the value of `fill_value` should be in the
domain of the packed data. If this argument is a list it must have the
same number of elements as `cube` if `cube` is a
- `:class:`iris.cube.CubeList`, or a single element, and each element of
+ :class:`iris.cube.CubeList`, or a single element, and each element of
this argument will be applied to each cube separately.
-
- * compute (bool):
+ compute: bool, optional
Default is ``True``, meaning complete the file immediately, and return ``None``.
When ``False``, create the output file but don't write any lazy array content to
@@ -2819,11 +2803,12 @@ def save(
must (re-)open the dataset for writing, which will fail if the file is
still open for writing by the caller.
- Returns:
- result (None, or dask.delayed.Delayed):
- If `compute=True`, returns `None`.
- Otherwise returns a :class:`dask.delayed.Delayed`, which implements delayed
- writing to fill in the variables data.
+ Returns
+ -------
+ result: None or dask.delayed.Delayed
+ If `compute=True`, returns `None`.
+ Otherwise returns a :class:`dask.delayed.Delayed`, which implements delayed
+ writing to fill in the variables data.
.. note::
@@ -2831,10 +2816,11 @@ def save(
`chunksizes` and `endian` keywords are silently ignored for netCDF 3
files that do not use HDF5.
- .. seealso::
-
- NetCDF Context manager (:class:`~Saver`).
-
+ Notes
+ -----
+ The `zlib`, `complevel`, `shuffle`, `fletcher32`, `contiguous`,
+ `chunksizes` and `endian` keywords are silently ignored for netCDF 3
+ files that do not use HDF5.
"""
from iris.cube import Cube, CubeList
diff --git a/lib/iris/io/__init__.py b/lib/iris/io/__init__.py
index 4e5004ff10..34be79135c 100644
--- a/lib/iris/io/__init__.py
+++ b/lib/iris/io/__init__.py
@@ -3,10 +3,7 @@
# This file is part of Iris and is released under the LGPL license.
# See COPYING and COPYING.LESSER in the root of the repository for full
# licensing details.
-"""
-Provides an interface to manage URI scheme support in iris.
-
-"""
+"""Provides an interface to manage URI scheme support in iris."""
import collections
from collections import OrderedDict
@@ -40,29 +37,27 @@ def __setitem__(self, key, value):
def run_callback(callback, cube, field, filename):
"""
- Runs the callback mechanism given the appropriate arguments.
-
- Args:
+ Run the callback mechanism given the appropriate arguments.
- * callback:
+ Parameters
+ ----------
+ callback
A function to add metadata from the originating field and/or URI which
obeys the following rules:
- 1. Function signature must be: ``(cube, field, filename)``.
- 2. Modifies the given cube inplace, unless a new cube is
- returned by the function.
- 3. If the cube is to be rejected the callback must raise
- an :class:`iris.exceptions.IgnoreCubeException`.
+ 1. Function signature must be: ``(cube, field, filename)``.
+ 2. Modifies the given cube inplace, unless a new cube is
+ returned by the function.
+ 3. If the cube is to be rejected the callback must raise
+ an :class:`iris.exceptions.IgnoreCubeException`.
- .. note::
-
- It is possible that this function returns None for certain callbacks,
- the caller of this function should handle this case.
-
- .. note::
+ Notes
+ -----
+ It is possible that this function returns None for certain callbacks,
+ the caller of this function should handle this case.
- This function maintains laziness when called; it does not realise data.
- See more at :doc:`/userguide/real_and_lazy_data`.
+ This function maintains laziness when called; it does not realise data.
+ See more at :doc:`/userguide/real_and_lazy_data`.
"""
from iris.cube import Cube
@@ -89,7 +84,7 @@ def run_callback(callback, cube, field, filename):
def decode_uri(uri, default="file"):
r"""
- Decodes a single URI into scheme and scheme-specific parts.
+ Decode a single URI into scheme and scheme-specific parts.
In addition to well-formed URIs, it also supports bare file paths as strings
or :class:`pathlib.PurePath`. Both Windows and UNIX style paths are
@@ -101,25 +96,26 @@ def decode_uri(uri, default="file"):
from iris.io import *
- Examples:
- >>> from iris.io import decode_uri
- >>> print(decode_uri('http://www.thing.com:8080/resource?id=a:b'))
- ('http', '//www.thing.com:8080/resource?id=a:b')
+ Examples
+ --------
+ >>> from iris.io import decode_uri
+ >>> print(decode_uri('http://www.thing.com:8080/resource?id=a:b'))
+ ('http', '//www.thing.com:8080/resource?id=a:b')
- >>> print(decode_uri('file:///data/local/dataZoo/...'))
- ('file', '///data/local/dataZoo/...')
+ >>> print(decode_uri('file:///data/local/dataZoo/...'))
+ ('file', '///data/local/dataZoo/...')
- >>> print(decode_uri('/data/local/dataZoo/...'))
- ('file', '/data/local/dataZoo/...')
+ >>> print(decode_uri('/data/local/dataZoo/...'))
+ ('file', '/data/local/dataZoo/...')
- >>> print(decode_uri('file:///C:\data\local\dataZoo\...'))
- ('file', '///C:\\data\\local\\dataZoo\\...')
+ >>> print(decode_uri('file:///C:\data\local\dataZoo\...'))
+ ('file', '///C:\\data\\local\\dataZoo\\...')
- >>> print(decode_uri('C:\data\local\dataZoo\...'))
- ('file', 'C:\\data\\local\\dataZoo\\...')
+ >>> print(decode_uri('C:\data\local\dataZoo\...'))
+ ('file', 'C:\\data\\local\\dataZoo\\...')
- >>> print(decode_uri('dataZoo/...'))
- ('file', 'dataZoo/...')
+ >>> print(decode_uri('dataZoo/...'))
+ ('file', 'dataZoo/...')
>>> print(decode_uri({}))
('data', {})
@@ -157,7 +153,7 @@ def expand_filespecs(file_specs, files_expected=True):
----------
file_specs : iterable of str
File paths which may contain ``~`` elements or wildcards.
- files_expected : bool, default=True
+ files_expected : bool, optional, default=True
Whether file is expected to exist (i.e. for load).
Returns
@@ -206,14 +202,16 @@ def expand_filespecs(file_specs, files_expected=True):
def load_files(filenames, callback, constraints=None):
"""
- Takes a list of filenames which may also be globs, and optionally a
+ Create a generator of Cubes from given files.
+
+ Take a list of filenames which may also be globs, and optionally a
constraint set and a callback function, and returns a
generator of Cubes from the given files.
- .. note::
-
- Typically, this function should not be called directly; instead, the
- intended interface for loading is :func:`iris.load`.
+ Notes
+ -----
+ Typically, this function should not be called directly; instead, the
+ intended interface for loading is :func:`iris.load`.
"""
from iris.fileformats import FORMAT_AGENT
@@ -244,13 +242,15 @@ def load_files(filenames, callback, constraints=None):
def load_http(urls, callback):
"""
- Takes a list of OPeNDAP URLs and a callback function, and returns a generator
- of Cubes from the given URLs.
+ Create generator of Cubes from the given OPeNDAP URLs.
- .. note::
+ Take a list of OPeNDAP URLs and a callback function, and returns a generator
+ of Cubes from the given URLs.
- Typically, this function should not be called directly; instead, the
- intended interface for loading is :func:`iris.load`.
+ Notes
+ -----
+ Typically, this function should not be called directly; instead, the
+ intended interface for loading is :func:`iris.load`.
"""
#
@@ -277,8 +277,8 @@ def load_http(urls, callback):
def load_data_objects(urls, callback):
"""
- Takes a list of data-source objects and a callback function, and returns a
- generator of Cubes.
+ Take a list of data-source objects and a callback function, and return a generator of Cubes.
+
The 'objects' take the place of 'uris' in the load calls.
The appropriate types of the data-source objects are expected to be
recognised by the handlers : This is done in the usual way by passing the
@@ -346,12 +346,16 @@ def add_saver(file_extension, new_saver):
"""
Add a custom saver to the Iris session.
- Args:
-
- * file_extension: A string such as "pp" or "my_format".
- * new_saver: A function of the form ``my_saver(cube, target)``.
+ Parameters
+ ----------
+ file_extension: str
+ A string such as "pp" or "my_format".
+ new_saver:function
+ A function of the form ``my_saver(cube, target)``.
- See also :func:`iris.io.save`
+ See Also
+ --------
+ :func:`iris.io.save`
"""
# Make sure it's a func with 2+ args
@@ -369,14 +373,16 @@ def find_saver(filespec):
"""
Find the saver function appropriate to the given filename or extension.
- Args:
-
- * filespec
- A string such as "my_file.pp" or "PP".
+ Parameters
+ ----------
+ filespec: str
+ A string such as "my_file.pp" or "PP".
- Returns:
- A save function or None.
- Save functions can be passed to :func:`iris.io.save`.
+ Returns
+ -------
+ Save function
+ Save functions can be passed to :func:`iris.io.save`. Value may also
+ be None.
"""
_check_init_savers()
@@ -401,23 +407,23 @@ def save(source, target, saver=None, **kwargs):
Iris currently supports three file formats for saving, which it can
recognise by filename extension:
- * netCDF - the Unidata network Common Data Format:
- * see :func:`iris.fileformats.netcdf.save`
- * GRIB2 - the WMO GRIdded Binary data format:
- * see :func:`iris_grib.save_grib2`.
- * PP - the Met Office UM Post Processing Format:
- * see :func:`iris.fileformats.pp.save`
+ * netCDF - the Unidata network Common Data Format:
+ * see :func:`iris.fileformats.netcdf.save`
+ * GRIB2 - the WMO GRIdded Binary data format:
+ * see :func:`iris_grib.save_grib2`.
+ * PP - the Met Office UM Post Processing Format:
+ * see :func:`iris.fileformats.pp.save`
A custom saver can be provided to the function to write to a different
file format.
Parameters
----------
- source : :class:`iris.cube.Cube` or :class:`iris.cube.CubeList`
- target : str or pathlib.PurePath or io.TextIOWrapper
+ source: :class:`iris.cube.Cube` or :class:`iris.cube.CubeList`
+ target: str or pathlib.PurePath or io.TextIOWrapper
When given a filename or file, Iris can determine the
file format.
- saver : str or function, optional
+ saver: str or function, optional
Specifies the file format to save.
If omitted, Iris will attempt to determine the format.
If a string, this is the recognised filename extension
@@ -428,7 +434,7 @@ def save(source, target, saver=None, **kwargs):
is assumed that a saver will accept an ``append`` keyword
if its file format can handle multiple cubes. See also
:func:`iris.io.add_saver`.
- **kwargs : dict, optional
+ **kwargs: dict, optional
All other keywords are passed through to the saver function; see the
relevant saver documentation for more information on keyword arguments.
@@ -470,8 +476,7 @@ def save(source, target, saver=None, **kwargs):
>>> iris.save(my_cube_list, "myfile.nc", netcdf_format="NETCDF3_CLASSIC")
Notes
- ------
-
+ -----
This function maintains laziness when called; it does not realise data.
See more at :doc:`/userguide/real_and_lazy_data`.
diff --git a/lib/iris/io/format_picker.py b/lib/iris/io/format_picker.py
index 9def0ada98..0189efa658 100644
--- a/lib/iris/io/format_picker.py
+++ b/lib/iris/io/format_picker.py
@@ -4,9 +4,10 @@
# See COPYING and COPYING.LESSER in the root of the repository for full
# licensing details.
"""
-A module to provide convenient file format identification through a combination of filename extension
-and file based *magic* numbers.
+Provide convenient file format identification.
+A module to provide convenient file format identification through a combination
+of filename extension and file based *magic* numbers.
To manage a collection of FormatSpecifications for loading::
@@ -25,9 +26,11 @@
with open(png_filename, 'rb') as png_fh:
handling_spec = fagent.get_spec(png_filename, png_fh)
-In the example, handling_spec will now be the png_spec previously added to the agent.
+In the example, handling_spec will now be the png_spec previously added to the
+agent.
-Now that a specification has been found, if a handler has been given with the specification, then the file can be handled::
+Now that a specification has been found, if a handler has been given with the
+specification, then the file can be handled::
handler = handling_spec.handler
if handler is None:
@@ -35,8 +38,8 @@
else:
result = handler(filename)
-The calling sequence of handler is dependent on the function given in the original specification and can be customised to your project's needs.
-
+The calling sequence of handler is dependent on the function given in the
+original specification and can be customised to your project's needs.
"""
@@ -48,10 +51,14 @@
class FormatAgent:
"""
- The FormatAgent class is the containing object which is responsible for identifying the format of a given file
- by interrogating its children FormatSpecification instances.
+ Identifies format of a given file by interrogating its children instances.
+
+ The FormatAgent class is the containing object which is responsible for
+ identifying the format of a given file by interrogating its children
+ FormatSpecification instances.
- Typically a FormatAgent will be created empty and then extended with the :meth:`FormatAgent.add_spec` method::
+ Typically a FormatAgent will be created empty and then extended with the
+ :meth:`FormatAgent.add_spec` method::
agent = FormatAgent()
agent.add_spec(NetCDF_specification)
@@ -63,12 +70,11 @@ class FormatAgent:
"""
def __init__(self, format_specs=None):
- """ """
self._format_specs = list(format_specs or [])
self._format_specs.sort()
def add_spec(self, format_spec):
- """Add a FormatSpecification instance to this agent for format consideration."""
+ """Add a FormatSpecification instance to this agent for format."""
self._format_specs.append(format_spec)
self._format_specs.sort()
@@ -83,15 +89,22 @@ def __str__(self):
def get_spec(self, basename, buffer_obj):
"""
+ Pick the first FormatSpecification.
+
Pick the first FormatSpecification which can handle the given
filename and file/buffer object.
- .. note::
+ Parameters
+ ----------
+ basename : TBD
+ buffer_obj : TBD
- ``buffer_obj`` may be ``None`` when a seekable file handle is not
- feasible (such as over the http protocol). In these cases only the
- format specifications which do not require a file handle are
- tested.
+ Notes
+ -----
+ ``buffer_obj`` may be ``None`` when a seekable file handle is not
+ feasible (such as over the http protocol). In these cases only the
+ format specifications which do not require a file handle are
+ tested.
"""
element_cache = {}
@@ -146,8 +159,10 @@ class FormatSpecification:
"""
Provides the base class for file type definition.
- Every FormatSpecification instance has a name which can be accessed with the :attr:`FormatSpecification.name` property and
- a FileElement, such as filename extension or 32-bit magic number, with an associated value for format identification.
+ Every FormatSpecification instance has a name which can be accessed with
+ the :attr:`FormatSpecification.name` property and a FileElement, such as
+ filename extension or 32-bit magic number, with an associated value for
+ format identification.
"""
@@ -161,20 +176,26 @@ def __init__(
constraint_aware_handler=False,
):
"""
- Constructs a new FormatSpecification given the format_name and particular FileElements
-
- Args:
-
- * format_name - string name of fileformat being described
- * file_element - FileElement instance of the element which identifies this FormatSpecification
- * file_element_value - The value that the file_element should take if a file matches this FormatSpecification
-
- Kwargs:
-
- * handler - function which will be called when the specification has been identified and is required to handler a format.
- If None, then the file can still be identified but no handling can be done.
- * priority - Integer giving a priority for considering this specification where higher priority means sooner consideration.
-
+ Construct a new FormatSpecification.
+
+ Parameters
+ ----------
+ format_name: str
+ string name of fileformat being described
+ file_element
+ FileElement instance of the element which identifies this
+ FormatSpecification
+ file_element_value
+ The value that the file_element should take if a file matches this
+ FormatSpecification
+ handler: optional
+ function which will be called when the specification has been
+ identified and is required to handler a format. If None, then the
+ file can still be identified but no handling can be done.
+ priority: int
+ Integer giving a priority for considering this specification where
+ higher priority means sooner consideration.
+ constraint_aware_handler: optional, default=False
"""
if not isinstance(file_element, FileElement):
raise ValueError(
@@ -190,8 +211,9 @@ def __init__(
self.constraint_aware_handler = constraint_aware_handler
def __hash__(self):
- # Hashed by specification for consistent ordering in FormatAgent (including self._handler in this hash
- # for example would order randomly according to object id)
+ # Hashed by specification for consistent ordering in FormatAgent
+ # (including self._handler in this hash for example would order
+ # randomly according to object id)
return hash(self._file_element)
@property
@@ -204,12 +226,12 @@ def file_element_value(self):
@property
def name(self):
- """The name of this FileFormat. (Read only)"""
+ """The name of this FileFormat. (Read only)."""
return self._format_name
@property
def handler(self):
- """The handler function of this FileFormat. (Read only)"""
+ """The handler function of this FileFormat. (Read only)."""
return self._handler
def _sort_key(self):
@@ -231,7 +253,8 @@ def __ne__(self, other):
return not (self == other)
def __repr__(self):
- # N.B. loader is not always going to provide a nice repr if it is a lambda function, hence a prettier version is available in __str__
+ # N.B. loader is not always going to provide a nice repr if it is a
+ # lambda function, hence a prettier version is available in __str__
return "FormatSpecification(%r, %r, %r, handler=%r, priority=%s)" % (
self._format_name,
self._file_element,
@@ -250,23 +273,27 @@ def __str__(self):
class FileElement:
"""
- Represents a specific aspect of a FileFormat which can be identified using the given element getter function.
+ Represents a specific aspect of a FileFormat.
+
+ Represents a specific aspect of a FileFormat which can be identified using
+ the given element getter function.
"""
def __init__(self, requires_fh=True):
"""
- Constructs a new file element, which may require a file buffer.
+ Construct a new file element, which may require a file buffer.
- Kwargs:
-
- * requires_fh - Whether this FileElement needs a file buffer.
+ Parameters
+ ----------
+ requires_fh: optional
+ Whether this FileElement needs a file buffer.
"""
self.requires_fh = requires_fh
def get_element(self, basename, file_handle):
- """Called when identifying the element of a file that this FileElement is representing."""
+ """Identify the element of a file that this FileElement is representing."""
raise NotImplementedError("get_element must be defined in a subclass")
def __hash__(self):
@@ -314,11 +341,14 @@ class LeadingLine(FileElement):
"""A :class:`FileElement` that returns the first line from the file."""
def get_element(self, basename, file_handle):
+ # noqa: D102
return file_handle.readline()
class UriProtocol(FileElement):
"""
+ Return the scheme and part from a URI, using :func:`~iris.io.decode_uri`.
+
A :class:`FileElement` that returns the "scheme" and "part" from a URI,
using :func:`~iris.io.decode_uri`.
@@ -328,6 +358,7 @@ def __init__(self):
FileElement.__init__(self, requires_fh=False)
def get_element(self, basename, file_handle):
+ # noqa: D102
from iris.io import decode_uri
return decode_uri(basename)[0]
diff --git a/lib/iris/time.py b/lib/iris/time.py
index 51aac3d46d..6788b01c19 100644
--- a/lib/iris/time.py
+++ b/lib/iris/time.py
@@ -3,17 +3,16 @@
# This file is part of Iris and is released under the LGPL license.
# See COPYING and COPYING.LESSER in the root of the repository for full
# licensing details.
-"""
-Time handling.
-"""
+"""Time handling."""
import functools
@functools.total_ordering
class PartialDateTime:
- """
+ """Allow partial comparisons against datetime-like objects.
+
A :class:`PartialDateTime` object specifies values for some subset of
the calendar/time fields (year, month, hour, etc.) for comparing
with :class:`datetime.datetime`-like instances.
@@ -45,7 +44,7 @@ class PartialDateTime:
#: A dummy value provided as a workaround to allow comparisons with
#: :class:`datetime.datetime`.
#: See http://bugs.python.org/issue8005.
- # NB. It doesn't even matter what this value is.
+ #: NB. It doesn't even matter what this value is.
timetuple = None
def __init__(
@@ -58,20 +57,28 @@ def __init__(
second=None,
microsecond=None,
):
- """
- Allows partial comparisons against datetime-like objects.
-
- Args:
-
- * year (int):
- * month (int):
- * day (int):
- * hour (int):
- * minute (int):
- * second (int):
- * microsecond (int):
-
- For example, to select any days of the year after the 3rd of April:
+ """Allow partial comparisons against datetime-like objects.
+
+ Parameters
+ ----------
+ year: int
+ The year number as an integer, or None.
+ month: int
+ The month number as an integer, or None.
+ day: int
+ The day number as an integer, or None.
+ hour: int
+ The hour number as an integer, or None.
+ minute: int
+ The minute number as an integer, or None.
+ second: int
+ The second number as an integer, or None.
+ microsecond: int
+ The microsecond number as an integer, or None.
+
+ Examples
+ --------
+ To select any days of the year after the 3rd of April:
>>> from iris.time import PartialDateTime
>>> import datetime
@@ -86,20 +93,12 @@ def __init__(
False
"""
-
- #: The year number as an integer, or None.
self.year = year
- #: The month number as an integer, or None.
self.month = month
- #: The day number as an integer, or None.
self.day = day
- #: The hour number as an integer, or None.
self.hour = hour
- #: The minute number as an integer, or None.
self.minute = minute
- #: The second number as an integer, or None.
self.second = second
- #: The microsecond number as an integer, or None.
self.microsecond = microsecond
def __repr__(self):
From d0ae7cd9952acb05821d96ae8afa3f4ece029c31 Mon Sep 17 00:00:00 2001
From: Tremain Knight <2108488+tkknight@users.noreply.github.com>
Date: Mon, 13 Nov 2023 15:27:05 +0000
Subject: [PATCH 2/6] added some noqa.
---
lib/iris/io/format_picker.py | 13 ++++++++++---
1 file changed, 10 insertions(+), 3 deletions(-)
diff --git a/lib/iris/io/format_picker.py b/lib/iris/io/format_picker.py
index 0189efa658..30cb741f49 100644
--- a/lib/iris/io/format_picker.py
+++ b/lib/iris/io/format_picker.py
@@ -218,10 +218,12 @@ def __hash__(self):
@property
def file_element(self):
+ # noqa D102
return self._file_element
@property
def file_element_value(self):
+ # noqa D102
return self._file_element_value
@property
@@ -314,6 +316,7 @@ def __init__(self, num_bytes, offset=None):
self._offset = offset
def get_element(self, basename, file_handle):
+ # noqa D102
if self._offset is not None:
file_handle.seek(self._offset)
bytes = file_handle.read(self._num_bytes)
@@ -334,6 +337,7 @@ class FileExtension(FileElement):
"""A :class:`FileElement` that returns the extension from the filename."""
def get_element(self, basename, file_handle):
+ # noqa D102
return os.path.splitext(basename)[1]
@@ -377,7 +381,10 @@ def __init__(self):
super().__init__(requires_fh=False)
def get_element(self, basename, file_handle):
- # In this context, there should *not* be a file opened by the handler.
- # Just return 'basename', which in this case is not a name, or even a
- # string, but a passed 'data object'.
+ """
+ In this context, there should *not* be a file opened by the handler.
+
+ Just return 'basename', which in this case is not a name, or even a
+ string, but a passed 'data object'.
+ """
return basename
From 9ac350a8ac19975b722db6162d55a4419c33f9e9 Mon Sep 17 00:00:00 2001
From: Tremain Knight <2108488+tkknight@users.noreply.github.com>
Date: Tue, 14 Nov 2023 09:40:31 +0000
Subject: [PATCH 3/6] api contents ordering to alphabetical
---
docs/src/conf.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/src/conf.py b/docs/src/conf.py
index 6282a68ba4..5bfbfcfe41 100644
--- a/docs/src/conf.py
+++ b/docs/src/conf.py
@@ -195,7 +195,7 @@ def _dotv(version):
todo_include_todos = True
# api generation configuration
-autodoc_member_order = "bysource"
+autodoc_member_order = "alphabetical"
autodoc_default_flags = ["show-inheritance"]
# https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#confval-autodoc_typehints
From dc5c830445e1ee4cf060262cbdf3963044663dda Mon Sep 17 00:00:00 2001
From: Tremain Knight <2108488+tkknight@users.noreply.github.com>
Date: Mon, 27 Nov 2023 11:01:07 +0000
Subject: [PATCH 4/6] remove duplicate note
---
lib/iris/fileformats/netcdf/saver.py | 9 ++-------
1 file changed, 2 insertions(+), 7 deletions(-)
diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py
index c2f82537e6..c7f3d759bd 100644
--- a/lib/iris/fileformats/netcdf/saver.py
+++ b/lib/iris/fileformats/netcdf/saver.py
@@ -2826,7 +2826,7 @@ def save(
.. Note::
when computed, the returned :class:`dask.delayed.Delayed` object returns
- a list of :class:`Warning`\\s : These are any warnings which *would* have
+ a list of :class:`Warning` : These are any warnings which *would* have
been issued in the save call, if ``compute`` had been ``True``.
.. Note::
@@ -2844,17 +2844,12 @@ def save(
Otherwise returns a :class:`dask.delayed.Delayed`, which implements delayed
writing to fill in the variables data.
- .. note::
-
- The `zlib`, `complevel`, `shuffle`, `fletcher32`, `contiguous`,
- `chunksizes` and `endian` keywords are silently ignored for netCDF 3
- files that do not use HDF5.
-
Notes
-----
The `zlib`, `complevel`, `shuffle`, `fletcher32`, `contiguous`,
`chunksizes` and `endian` keywords are silently ignored for netCDF 3
files that do not use HDF5.
+
"""
from iris.cube import Cube, CubeList
From 2086a2532841001e7f66871957242ddc7600bcb5 Mon Sep 17 00:00:00 2001
From: Tremain Knight <2108488+tkknight@users.noreply.github.com>
Date: Wed, 29 Nov 2023 10:43:14 +0000
Subject: [PATCH 5/6] updated string to str for rendering in docs
---
lib/iris/fileformats/netcdf/saver.py | 12 ++++++------
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py
index c7f3d759bd..9a3b0c503a 100644
--- a/lib/iris/fileformats/netcdf/saver.py
+++ b/lib/iris/fileformats/netcdf/saver.py
@@ -395,11 +395,11 @@ def __init__(self, filename, netcdf_format, compute=True):
Parameters
----------
- filename : string or netCDF4.Dataset
+ filename : str or netCDF4.Dataset
Name of the netCDF file to save the cube.
OR a writeable object supporting the :class:`netCF4.Dataset` api.
- netcdf_format : string
+ netcdf_format : str
Underlying netCDF file format, one of 'NETCDF4', 'NETCDF4_CLASSIC',
'NETCDF3_CLASSIC' or 'NETCDF3_64BIT'. Default is 'NETCDF4' format.
@@ -872,7 +872,7 @@ def _add_mesh(self, cube_or_mesh):
Returns
-------
- cf_mesh_name: string or None
+ cf_mesh_name: str or None
The name of the mesh variable created, or None if the cube does not
have a mesh.
"""
@@ -1827,7 +1827,7 @@ def _create_generic_cf_array_var(
An Iris :class:`iris.coords._DimensionalMetadata`, belonging to the
cube. Provides data, units and standard/long/var names.
Not used if 'element_dims' is not None.
- element_dims : list of string, or None
+ element_dims : list of str, or None
If set, contains the variable dimension (names),
otherwise these are taken from `element.cube_dims[cube]`.
For Mesh components (element coordinates and connectivities), this
@@ -2292,7 +2292,7 @@ def _create_cf_data_variable(
String names for each dimension of the cube.
local_keys: iterable of str, optional
See :func:`iris.fileformats.netcdf.Saver.write`
- packing: type or string or dict or list, optional
+ packing: type or str or dict or list, optional
See :func:`iris.fileformats.netcdf.Saver.write`
fill_value: optional
See :func:`iris.fileformats.netcdf.Saver.write`
@@ -2726,7 +2726,7 @@ def save(
When saving to a dataset, ``compute`` **must** be ``False`` :
See the ``compute`` parameter.
- netcdf_format: string
+ netcdf_format: str
Underlying netCDF file format, one of 'NETCDF4', 'NETCDF4_CLASSIC',
'NETCDF3_CLASSIC' or 'NETCDF3_64BIT'. Default is 'NETCDF4' format.
local_keys: iterable of str, optional
From 506968f1b36f723ff3f82237ed5b71911eb5aabd Mon Sep 17 00:00:00 2001
From: Tremain Knight <2108488+tkknight@users.noreply.github.com>
Date: Mon, 4 Dec 2023 16:01:30 +0000
Subject: [PATCH 6/6] ensured space around colon for listed parameters.
---
lib/iris/config.py | 2 +-
lib/iris/fileformats/netcdf/_dask_locks.py | 2 +-
lib/iris/fileformats/netcdf/loader.py | 2 +-
lib/iris/fileformats/netcdf/saver.py | 122 ++++++++++-----------
lib/iris/io/__init__.py | 16 +--
lib/iris/io/format_picker.py | 10 +-
lib/iris/time.py | 14 +--
7 files changed, 84 insertions(+), 84 deletions(-)
diff --git a/lib/iris/config.py b/lib/iris/config.py
index 0c5bffd4a8..22fb93a06a 100644
--- a/lib/iris/config.py
+++ b/lib/iris/config.py
@@ -199,7 +199,7 @@ def __init__(self, conventions_override=None):
Parameters
----------
- conventions_override: bool, optional
+ conventions_override : bool, optional
Define whether the CF Conventions version (e.g. `CF-1.6`) set when
saving a cube to a NetCDF file should be defined by
Iris (the default) or the cube being saved. If `False`
diff --git a/lib/iris/fileformats/netcdf/_dask_locks.py b/lib/iris/fileformats/netcdf/_dask_locks.py
index 2ce5f8432a..82edbf202e 100644
--- a/lib/iris/fileformats/netcdf/_dask_locks.py
+++ b/lib/iris/fileformats/netcdf/_dask_locks.py
@@ -125,7 +125,7 @@ def get_worker_lock(identity: str):
Parameters
----------
- identity: str
+ identity : str
"""
scheduler_type = get_dask_array_scheduler_type()
diff --git a/lib/iris/fileformats/netcdf/loader.py b/lib/iris/fileformats/netcdf/loader.py
index eea0e9a2ac..84e04c1589 100644
--- a/lib/iris/fileformats/netcdf/loader.py
+++ b/lib/iris/fileformats/netcdf/loader.py
@@ -575,7 +575,7 @@ def load_cubes(file_sources, callback=None, constraints=None):
One or more NetCDF filenames/OPeNDAP URLs to load from.
OR open datasets.
- callback: function, optional
+ callback : function, optional
Function which can be passed on to :func:`iris.io.run_callback`.
Returns
diff --git a/lib/iris/fileformats/netcdf/saver.py b/lib/iris/fileformats/netcdf/saver.py
index 9a3b0c503a..b0bff313e9 100644
--- a/lib/iris/fileformats/netcdf/saver.py
+++ b/lib/iris/fileformats/netcdf/saver.py
@@ -350,7 +350,7 @@ def _fillvalue_report(fill_info, is_masked, contains_fill_value, warn=False):
Returns
-------
- None or :class:`Warning`
+ None or :class:`Warning`
If not None, indicates a known or possible problem with filling
"""
@@ -797,7 +797,7 @@ def update_global_attributes(self, attributes=None, **kwargs):
Parameters
----------
- attributes: dict or iterable of key, value pairs
+ attributes : dict or iterable of key, value pairs
CF global attributes to be updated.
"""
# TODO: when when iris.FUTURE.save_split_attrs is removed, this routine will
@@ -821,9 +821,9 @@ def _create_cf_dimensions(
Parameters
----------
- cube: :class:`iris.cube.Cube`
+ cube : :class:`iris.cube.Cube`
A :class:`iris.cube.Cube` in which to lookup coordinates.
- unlimited_dimensions: iterable of strings and/or :class:`iris.coords.Coord` objects):
+ unlimited_dimensions : iterable of strings and/or :class:`iris.coords.Coord` objects
List of coordinates to make unlimited (None by default).
Returns
@@ -867,12 +867,12 @@ def _add_mesh(self, cube_or_mesh):
Parameters
----------
- cube_or_mesh: :class:`iris.cube.Cube`or :class:`iris.experimental.ugrid.Mesh`
+ cube_or_mesh : :class:`iris.cube.Cube` or :class:`iris.experimental.ugrid.Mesh`
The Cube or Mesh being saved to the netCDF file.
Returns
-------
- cf_mesh_name: str or None
+ cf_mesh_name : str or None
The name of the mesh variable created, or None if the cube does not
have a mesh.
"""
@@ -1042,11 +1042,11 @@ def _add_aux_coords(self, cube, cf_var_cube, dimension_names):
Parameters
----------
- cube: :class:`iris.cube.Cube`
+ cube : :class:`iris.cube.Cube`
A :class:`iris.cube.Cube` to be saved to a netCDF file.
- cf_var_cube: :class:`netcdf.netcdf_variable`
+ cf_var_cube : :class:`netcdf.netcdf_variable`
cf variable cube representation.
- dimension_names: list
+ dimension_names : list
Names associated with the dimensions of the cube.
"""
from iris.experimental.ugrid.mesh import (
@@ -1083,11 +1083,11 @@ def _add_cell_measures(self, cube, cf_var_cube, dimension_names):
Parameters
----------
- cube: :class:`iris.cube.Cube`
+ cube : :class:`iris.cube.Cube`
A :class:`iris.cube.Cube` to be saved to a netCDF file.
- cf_var_cube: :class:`netcdf.netcdf_variable`
+ cf_var_cube : :class:`netcdf.netcdf_variable`
cf variable cube representation.
- dimension_names: list
+ dimension_names : list
Names associated with the dimensions of the cube.
"""
return self._add_inner_related_vars(
@@ -1103,11 +1103,11 @@ def _add_ancillary_variables(self, cube, cf_var_cube, dimension_names):
Parameters
----------
- cube: :class:`iris.cube.Cube`
+ cube : :class:`iris.cube.Cube`
A :class:`iris.cube.Cube` to be saved to a netCDF file.
- cf_var_cube: :class:`netcdf.netcdf_variable`
+ cf_var_cube : :class:`netcdf.netcdf_variable`
cf variable cube representation.
- dimension_names: list
+ dimension_names : list
Names associated with the dimensions of the cube.
"""
return self._add_inner_related_vars(
@@ -1123,9 +1123,9 @@ def _add_dim_coords(self, cube, dimension_names):
Parameters
----------
- cube: :class:`iris.cube.Cube`
+ cube : :class:`iris.cube.Cube`
A :class:`iris.cube.Cube` to be saved to a netCDF file.
- dimension_names: list
+ dimension_names : list
Names associated with the dimensions of the cube.
"""
# Ensure we create the netCDF coordinate variables first.
@@ -1148,11 +1148,11 @@ def _add_aux_factories(self, cube, cf_var_cube, dimension_names):
Parameters
----------
- cube: :class:`iris.cube.Cube`
+ cube : :class:`iris.cube.Cube`
A :class:`iris.cube.Cube` to be saved to a netCDF file.
cf_var_cube: :class:`netcdf.netcdf_variable`
CF variable cube representation.
- dimension_names: list
+ dimension_names : list
Names associated with the dimensions of the cube.
"""
primaries = []
@@ -1241,14 +1241,14 @@ def _get_dim_names(self, cube_or_mesh):
Parameters
----------
- cube_or_mesh: :class:`iris.cube.Cube` or :class:`iris.experimental.ugrid.Mesh`
+ cube_or_mesh : :class:`iris.cube.Cube` or :class:`iris.experimental.ugrid.Mesh`
The Cube or Mesh being saved to the netCDF file.
Returns
-------
- mesh_dimensions: list of str
+ mesh_dimensions : list of str
A list of the mesh dimensions of the attached mesh, if any.
- cube_dimensions: list of str
+ cube_dimensions : list of str
A lists of dimension names for each dimension of the cube
Notes
@@ -1467,7 +1467,7 @@ def cf_valid_var_name(var_name):
Parameters
----------
- var_name: str
+ var_name : str
The var_name to normalise
Returns
@@ -1490,7 +1490,7 @@ def _cf_coord_standardised_units(coord):
Parameters
----------
- coord: :class:`iris.coords.Coord`
+ coord : :class:`iris.coords.Coord`
A coordinate of a cube.
Returns
@@ -1552,11 +1552,11 @@ def _create_cf_bounds(self, coord, cf_var, cf_name):
Parameters
----------
- coord: :class:`iris.coords.Coord`
+ coord : :class:`iris.coords.Coord`
A coordinate of a cube.
cf_var:
CF-netCDF variable
- cf_name: str
+ cf_name : str
name of the CF-NetCDF variable.
Returns
@@ -1613,7 +1613,7 @@ def _get_cube_variable_name(self, cube):
Parameters
----------
- cube: :class:`iris.cube.Cube`
+ cube : :class:`iris.cube.Cube`
An instance of a cube for which a CF-netCDF variable
name is required.
@@ -1638,9 +1638,9 @@ def _get_coord_variable_name(self, cube_or_mesh, coord):
Parameters
----------
- cube_or_mesh: :class:`iris.cube.Cube` or :class:`iris.experimental.ugrid.Mesh`
+ cube_or_mesh : :class:`iris.cube.Cube` or :class:`iris.experimental.ugrid.Mesh`
The Cube or Mesh being saved to the netCDF file.
- coord: :class:`iris.coords._DimensionalMetadata`
+ coord : :class:`iris.coords._DimensionalMetadata`
An instance of a coordinate (or similar), for which a CF-netCDF
variable name is required.
@@ -1701,7 +1701,7 @@ def _get_mesh_variable_name(self, mesh):
Parameters
----------
- mesh: :class:`iris.experimental.ugrid.mesh.Mesh`
+ mesh : :class:`iris.experimental.ugrid.mesh.Mesh`
An instance of a Mesh for which a CF-netCDF variable name is
required.
@@ -1728,7 +1728,7 @@ def _create_mesh(self, mesh):
Parameters
----------
- mesh: :class:`iris.experimental.ugrid.mesh.Mesh`
+ mesh : :class:`iris.experimental.ugrid.mesh.Mesh`
The Mesh to be saved to CF-netCDF file.
Returns
@@ -1819,11 +1819,11 @@ def _create_generic_cf_array_var(
Parameters
----------
- cube_or_mesh: :class:`iris.cube.Cube` or :class:`iris.experimental.ugrid.Mesh`
+ cube_or_mesh : :class:`iris.cube.Cube` or :class:`iris.experimental.ugrid.Mesh`
The Cube or Mesh being saved to the netCDF file.
- cube_dim_names: list of str
+ cube_dim_names : list of str
The name of each dimension of the cube.
- element: :class:`iris.coords._DimensionalMetadata`
+ element : :class:`iris.coords._DimensionalMetadata`
An Iris :class:`iris.coords._DimensionalMetadata`, belonging to the
cube. Provides data, units and standard/long/var names.
Not used if 'element_dims' is not None.
@@ -1832,7 +1832,7 @@ def _create_generic_cf_array_var(
otherwise these are taken from `element.cube_dims[cube]`.
For Mesh components (element coordinates and connectivities), this
*must* be passed in, as "element.cube_dims" does not function.
- fill_value: number or None
+ fill_value : number or None
If set, create the variable with this fill-value, and fill any
masked data points with this value.
If not set, standard netcdf4-python behaviour : the variable has no
@@ -2015,10 +2015,10 @@ def _create_cf_grid_mapping(self, cube, cf_var_cube):
Parameters
----------
- cube: :class:`iris.cube.Cube` or :class:`iris.cube.CubeList`
+ cube : :class:`iris.cube.Cube` or :class:`iris.cube.CubeList`
A :class:`iris.cube.Cube`, :class:`iris.cube.CubeList` or list of
cubes to be saved to a netCDF file.
- cf_var_cube: :class:`netcdf.netcdf_variable`
+ cf_var_cube : :class:`netcdf.netcdf_variable`
cf variable cube representation.
Returns
@@ -2286,15 +2286,15 @@ def _create_cf_data_variable(
Parameters
----------
- cube: :class:`iris.cube.Cube`
+ cube : :class:`iris.cube.Cube`
The associated cube being saved to CF-netCDF file.
- dimension_names: list
+ dimension_names : list
String names for each dimension of the cube.
- local_keys: iterable of str, optional
+ local_keys : iterable of str, optional
See :func:`iris.fileformats.netcdf.Saver.write`
- packing: type or str or dict or list, optional
+ packing : type or str or dict or list, optional
See :func:`iris.fileformats.netcdf.Saver.write`
- fill_value: optional
+ fill_value : optional
See :func:`iris.fileformats.netcdf.Saver.write`
All other keywords are passed through to the dataset's `createVariable`
@@ -2455,7 +2455,7 @@ def _increment_name(self, varname):
Parameters
----------
- varname: str
+ varname : str
Variable name to increment.
Returns
@@ -2645,12 +2645,12 @@ def complete(self, issue_warnings=True) -> List[Warning]:
Parameters
----------
- issue_warnings: bool, default = True
+ issue_warnings : bool, default = True
If true, issue all the resulting warnings with :func:`warnings.warn`.
Returns
-------
- warnings: list of Warning
+ warnings : list of Warning
Any warnings that were raised while writing delayed data.
"""
@@ -2715,10 +2715,10 @@ def save(
Parameters
----------
- cube: :class:`iris.cube.Cube` or :class:`iris.cube.CubeList`
+ cube : :class:`iris.cube.Cube` or :class:`iris.cube.CubeList`
A :class:`iris.cube.Cube`, :class:`iris.cube.CubeList` or other
iterable of cubes to be saved to a netCDF file.
- filename: str
+ filename : str
Name of the netCDF file to save the cube(s).
**Or** an open, writeable :class:`netCDF4.Dataset`, or compatible object.
@@ -2726,10 +2726,10 @@ def save(
When saving to a dataset, ``compute`` **must** be ``False`` :
See the ``compute`` parameter.
- netcdf_format: str
+ netcdf_format : str
Underlying netCDF file format, one of 'NETCDF4', 'NETCDF4_CLASSIC',
'NETCDF3_CLASSIC' or 'NETCDF3_64BIT'. Default is 'NETCDF4' format.
- local_keys: iterable of str, optional
+ local_keys : iterable of str, optional
            An iterable of cube attribute keys. Any cube attributes with
matching keys will become attributes on the data variable rather
than global attributes.
@@ -2738,30 +2738,30 @@ def save(
This is *ignored* if 'split-attribute saving' is **enabled**,
i.e. when ``iris.FUTURE.save_split_attrs`` is ``True``.
- unlimited_dimensions: iterable of str and/or :class:`iris.coords.Coord` objects, optional
+ unlimited_dimensions : iterable of str and/or :class:`iris.coords.Coord` objects, optional
List of coordinate names (or coordinate objects) corresponding
to coordinate dimensions of `cube` to save with the NetCDF dimension
variable length 'UNLIMITED'. By default, no unlimited dimensions are
saved. Only the 'NETCDF4' format supports multiple 'UNLIMITED'
dimensions.
- zlib: bool, optional
+ zlib : bool, optional
If `True`, the data will be compressed in the netCDF file using gzip
compression (default `False`).
- complevel: int
+ complevel : int
An integer between 1 and 9 describing the level of compression desired
(default 4). Ignored if `zlib=False`.
- shuffle: bool, optional
+ shuffle : bool, optional
If `True`, the HDF5 shuffle filter will be applied before compressing
the data (default `True`). This significantly improves compression.
Ignored if `zlib=False`.
- fletcher32: bool, optional
+ fletcher32 : bool, optional
If `True`, the Fletcher32 HDF5 checksum algorithm is activated to
detect errors. Default `False`.
- contiguous: bool, optional
+ contiguous : bool, optional
If `True`, the variable data is stored contiguously on disk. Default
`False`. Setting to `True` for a variable with an unlimited dimension
will trigger an error.
- chunksizes: tuple of int, optional
+ chunksizes : tuple of int, optional
Used to manually specify the HDF5 chunksizes for each dimension of the
variable. A detailed discussion of HDF chunking and I/O performance is
available
@@ -2769,7 +2769,7 @@ def save(
Basically, you want the chunk size for each dimension to match as
closely as possible the size of the data block that users will read
from the file. `chunksizes` cannot be set if `contiguous=True`.
- endian: str
+ endian : str
Used to control whether the data is stored in little or big endian
format on disk. Possible values are 'little', 'big' or 'native'
(default). The library will automatically handle endian conversions
@@ -2777,7 +2777,7 @@ def save(
computer with the opposite format as the one used to create the file,
there may be some performance advantage to be gained by setting the
endian-ness.
- least_significant_digit: int, optional
+ least_significant_digit : int, optional
If `least_significant_digit` is specified, variable data will be
truncated (quantized). In conjunction with `zlib=True` this produces
'lossy', but significantly more efficient compression. For example, if
@@ -2789,7 +2789,7 @@ def save(
"least_significant_digit -- power of ten of the smallest decimal place
in unpacked data that is a reliable value". Default is `None`, or no
quantization, or 'lossless' compression.
- packing: type or str or dict or list, optional
+ packing : type or str or dict or list, optional
A numpy integer datatype (signed or unsigned) or a string that
describes a numpy integer dtype (i.e. 'i2', 'short', 'u4') or a dict
of packing parameters as described below or an iterable of such types,
@@ -2807,14 +2807,14 @@ def save(
a list it must have the same number of elements as `cube` if `cube` is
a :class:`iris.cube.CubeList`, or one element, and each element of
this argument will be applied to each cube separately.
- fill_value: numeric or list, optional
+ fill_value : numeric or list, optional
The value to use for the `_FillValue` attribute on the netCDF variable.
If `packing` is specified the value of `fill_value` should be in the
domain of the packed data. If this argument is a list it must have the
same number of elements as `cube` if `cube` is a
:class:`iris.cube.CubeList`, or a single element, and each element of
this argument will be applied to each cube separately.
- compute: bool, optional
+ compute : bool, optional
Default is ``True``, meaning complete the file immediately, and return ``None``.
When ``False``, create the output file but don't write any lazy array content to
diff --git a/lib/iris/io/__init__.py b/lib/iris/io/__init__.py
index 16f7af48e6..87725789e5 100644
--- a/lib/iris/io/__init__.py
+++ b/lib/iris/io/__init__.py
@@ -41,7 +41,7 @@ def run_callback(callback, cube, field, filename):
Parameters
----------
- callback
+ callback :
A function to add metadata from the originating field and/or URI which
obeys the following rules:
@@ -348,9 +348,9 @@ def add_saver(file_extension, new_saver):
Parameters
----------
- file_extension: str
+ file_extension : str
A string such as "pp" or "my_format".
- new_saver:function
+ new_saver : function
A function of the form ``my_saver(cube, target)``.
See Also
@@ -375,7 +375,7 @@ def find_saver(filespec):
Parameters
----------
- filespec: str
+ filespec : str
A string such as "my_file.pp" or "PP".
Returns
@@ -419,11 +419,11 @@ def save(source, target, saver=None, **kwargs):
Parameters
----------
- source: :class:`iris.cube.Cube` or :class:`iris.cube.CubeList`
- target: str or pathlib.PurePath or io.TextIOWrapper
+ source : :class:`iris.cube.Cube` or :class:`iris.cube.CubeList`
+ target : str or pathlib.PurePath or io.TextIOWrapper
When given a filename or file, Iris can determine the
file format.
- saver: str or function, optional
+ saver : str or function, optional
Specifies the file format to save.
If omitted, Iris will attempt to determine the format.
If a string, this is the recognised filename extension
@@ -434,7 +434,7 @@ def save(source, target, saver=None, **kwargs):
is assumed that a saver will accept an ``append`` keyword
if its file format can handle multiple cubes. See also
:func:`iris.io.add_saver`.
- **kwargs: dict, optional
+ **kwargs : dict, optional
All other keywords are passed through to the saver function; see the
relevant saver documentation for more information on keyword arguments.
diff --git a/lib/iris/io/format_picker.py b/lib/iris/io/format_picker.py
index bfc728f575..da64345cf3 100644
--- a/lib/iris/io/format_picker.py
+++ b/lib/iris/io/format_picker.py
@@ -179,15 +179,15 @@ def __init__(
Parameters
----------
- format_name: str
+ format_name : str
string name of fileformat being described
- file_element
+ file_element :
FileElement instance of the element which identifies this
FormatSpecification
- file_element_value
+ file_element_value :
The value that the file_element should take if a file matches this
FormatSpecification
- handler: optional
+ handler : optional
function which will be called when the specification has been
             identified and is required to handle a format. If None, then the
file can still be identified but no handling can be done.
@@ -287,7 +287,7 @@ def __init__(self, requires_fh=True):
Parameters
----------
- requires_fh: optional
+ requires_fh : optional
Whether this FileElement needs a file buffer.
"""
diff --git a/lib/iris/time.py b/lib/iris/time.py
index abcf92e4bd..6ba85a0051 100644
--- a/lib/iris/time.py
+++ b/lib/iris/time.py
@@ -60,19 +60,19 @@ def __init__(
Parameters
----------
- year: int
+ year : int
The year number as an integer, or None.
- month: int
+ month : int
The month number as an integer, or None.
- day: int
+ day : int
The day number as an integer, or None.
- hour: int
+ hour : int
The hour number as an integer, or None.
- minute: int
+ minute : int
The minute number as an integer, or None.
- second: int
+ second : int
The second number as an integer, or None.
- microsecond: int
+ microsecond : int
The microsecond number as an integer, or None.
Examples