diff --git a/setup.cfg b/setup.cfg index 17f24b3f1ce..a0d4b46c14e 100644 --- a/setup.cfg +++ b/setup.cfg @@ -8,7 +8,10 @@ testpaths=xarray/tests [flake8] max-line-length=79 ignore= - W503 + E402 # module level import not at top of file + E731 # do not assign a lambda expression, use a def + W503 # line break before binary operator + W504 # line break after binary operator exclude= doc/ diff --git a/versioneer.py b/versioneer.py index dffd66b69a6..577743023ca 100644 --- a/versioneer.py +++ b/versioneer.py @@ -1,3 +1,4 @@ +# flake8: noqa # Version: 0.18 diff --git a/xarray/backends/api.py b/xarray/backends/api.py index b4297801309..0ba2e94028c 100644 --- a/xarray/backends/api.py +++ b/xarray/backends/api.py @@ -1,33 +1,32 @@ from __future__ import absolute_import, division, print_function import os.path +import warnings from glob import glob from io import BytesIO from numbers import Number -import warnings import numpy as np from .. import Dataset, backends, conventions from ..core import indexing -from ..core.combine import _infer_concat_order_from_positions, _auto_combine +from ..core.combine import _auto_combine, _infer_concat_order_from_positions from ..core.pycompat import basestring, path_type -from ..core.utils import close_on_error, is_remote_uri, is_grib_path +from ..core.utils import close_on_error, is_grib_path, is_remote_uri from .common import ArrayWriter from .locks import _get_scheduler - DATAARRAY_NAME = '__xarray_dataarray_name__' DATAARRAY_VARIABLE = '__xarray_dataarray_variable__' def _get_default_engine_remote_uri(): try: - import netCDF4 + import netCDF4 # noqa engine = 'netcdf4' except ImportError: # pragma: no cover try: - import pydap # flake8: noqa + import pydap # noqa engine = 'pydap' except ImportError: raise ValueError('netCDF4 or pydap is required for accessing ' @@ -38,12 +37,12 @@ def _get_default_engine_remote_uri(): def _get_default_engine_grib(): msgs = [] try: - import Nio # flake8: noqa + import Nio # noqa msgs += ["set engine='pynio' to access GRIB files with PyNIO"] except ImportError: # pragma: no cover pass try: - import cfgrib # flake8: noqa + import cfgrib # noqa msgs += ["set engine='cfgrib' to access GRIB files with cfgrib"] except ImportError: # pragma: no cover pass @@ -56,7 +55,7 @@ def _get_default_engine_grib(): def _get_default_engine_gz(): try: - import scipy # flake8: noqa + import scipy # noqa engine = 'scipy' except ImportError: # pragma: no cover raise ValueError('scipy is required for accessing .gz files') @@ -65,11 +64,11 @@ def _get_default_engine_gz(): def _get_default_engine_netcdf(): try: - import netCDF4 # flake8: noqa + import netCDF4 # noqa engine = 'netcdf4' except ImportError: # pragma: no cover try: - import scipy.io.netcdf # flake8: noqa + import scipy.io.netcdf # noqa engine = 'scipy' except ImportError: raise ValueError('cannot read or write netCDF files without ' @@ -579,7 +578,7 @@ def open_mfdataset(paths, chunks=None, concat_dim=_CONCAT_DIM_DEFAULT, .. [1] http://xarray.pydata.org/en/stable/dask.html .. 
[2] http://xarray.pydata.org/en/stable/dask.html#chunking-and-performance - """ + """ # noqa if isinstance(paths, basestring): if is_remote_uri(paths): raise ValueError( @@ -642,11 +641,12 @@ def open_mfdataset(paths, chunks=None, concat_dim=_CONCAT_DIM_DEFAULT, # Discard ordering because it should be redone from coordinates ids = False - combined = _auto_combine(datasets, concat_dims=concat_dims, - compat=compat, - data_vars=data_vars, coords=coords, - infer_order_from_coords=infer_order_from_coords, - ids=ids) + combined = _auto_combine( + datasets, concat_dims=concat_dims, + compat=compat, + data_vars=data_vars, coords=coords, + infer_order_from_coords=infer_order_from_coords, + ids=ids) except ValueError: for ds in datasets: ds.close() diff --git a/xarray/backends/cfgrib_.py b/xarray/backends/cfgrib_.py index 0807900054a..96095b7b858 100644 --- a/xarray/backends/cfgrib_.py +++ b/xarray/backends/cfgrib_.py @@ -6,7 +6,7 @@ from ..core import indexing from ..core.utils import Frozen, FrozenOrderedDict from .common import AbstractDataStore, BackendArray -from .locks import ensure_lock, SerializableLock +from .locks import SerializableLock, ensure_lock # FIXME: Add a dedicated lock, even if ecCodes is supposed to be thread-safe # in most circumstances. See: diff --git a/xarray/backends/file_manager.py b/xarray/backends/file_manager.py index 6362842dd42..d329f9e734f 100644 --- a/xarray/backends/file_manager.py +++ b/xarray/backends/file_manager.py @@ -7,7 +7,6 @@ from .locks import acquire from .lru_cache import LRUCache - # Global cache for storing open files. FILE_CACHE = LRUCache( OPTIONS['file_cache_maxsize'], on_evict=lambda k, v: v.close()) diff --git a/xarray/backends/netCDF4_.py b/xarray/backends/netCDF4_.py index 2dc692e8724..9306b24a2fc 100644 --- a/xarray/backends/netCDF4_.py +++ b/xarray/backends/netCDF4_.py @@ -14,9 +14,9 @@ from ..core.utils import FrozenOrderedDict, close_on_error, is_remote_uri from .common import ( BackendArray, WritableCFDataStore, find_root, robust_getitem) -from .locks import (NETCDFC_LOCK, HDF5_LOCK, - combine_locks, ensure_lock, get_write_lock) from .file_manager import CachingFileManager, DummyFileManager +from .locks import ( + HDF5_LOCK, NETCDFC_LOCK, combine_locks, ensure_lock, get_write_lock) from .netcdf3 import encode_nc3_attr_value, encode_nc3_variable # This lookup table maps from dtype.byteorder to a readable endian diff --git a/xarray/backends/netcdf3.py b/xarray/backends/netcdf3.py index c7bfa0ea20b..a6084649442 100644 --- a/xarray/backends/netcdf3.py +++ b/xarray/backends/netcdf3.py @@ -9,7 +9,7 @@ # Special characters that are permitted in netCDF names except in the # 0th position of the string -_specialchars = '_.@+- !"#$%&\()*,:;<=>?[]^`{|}~' +_specialchars = '_.@+- !"#$%&\\()*,:;<=>?[]^`{|}~' # The following are reserved names in CDL and may not be used as names of # variables, dimension, attributes diff --git a/xarray/backends/pseudonetcdf_.py b/xarray/backends/pseudonetcdf_.py index 41bc256835a..81b5722db78 100644 --- a/xarray/backends/pseudonetcdf_.py +++ b/xarray/backends/pseudonetcdf_.py @@ -10,7 +10,6 @@ from .file_manager import CachingFileManager from .locks import HDF5_LOCK, NETCDFC_LOCK, combine_locks, ensure_lock - # psuedonetcdf can invoke netCDF libraries internally PNETCDF_LOCK = combine_locks([HDF5_LOCK, NETCDFC_LOCK]) diff --git a/xarray/backends/pynio_.py b/xarray/backends/pynio_.py index b171192ed6a..03507ab6c2c 100644 --- a/xarray/backends/pynio_.py +++ b/xarray/backends/pynio_.py @@ -8,8 +8,7 @@ from .common import 
AbstractDataStore, BackendArray from .file_manager import CachingFileManager from .locks import ( - HDF5_LOCK, NETCDFC_LOCK, combine_locks, ensure_lock, SerializableLock) - + HDF5_LOCK, NETCDFC_LOCK, SerializableLock, combine_locks, ensure_lock) # PyNIO can invoke netCDF libraries internally # Add a dedicated lock just in case NCL as well isn't thread-safe. diff --git a/xarray/backends/rasterio_.py b/xarray/backends/rasterio_.py index 24874d63f93..26d408d50f6 100644 --- a/xarray/backends/rasterio_.py +++ b/xarray/backends/rasterio_.py @@ -11,7 +11,6 @@ from .file_manager import CachingFileManager from .locks import SerializableLock - # TODO: should this be GDAL_LOCK instead? RASTERIO_LOCK = SerializableLock() diff --git a/xarray/backends/scipy_.py b/xarray/backends/scipy_.py index 157ae44f547..5739c1a8617 100644 --- a/xarray/backends/scipy_.py +++ b/xarray/backends/scipy_.py @@ -11,8 +11,8 @@ from ..core.pycompat import OrderedDict, basestring, iteritems from ..core.utils import Frozen, FrozenOrderedDict from .common import BackendArray, WritableCFDataStore -from .locks import ensure_lock, get_write_lock from .file_manager import CachingFileManager, DummyFileManager +from .locks import ensure_lock, get_write_lock from .netcdf3 import ( encode_nc3_attr_value, encode_nc3_variable, is_valid_nc3_name) diff --git a/xarray/backends/zarr.py b/xarray/backends/zarr.py index 05e445a1e88..feefaf1735f 100644 --- a/xarray/backends/zarr.py +++ b/xarray/backends/zarr.py @@ -8,7 +8,7 @@ from ..core import indexing from ..core.pycompat import OrderedDict, integer_types, iteritems from ..core.utils import FrozenOrderedDict, HiddenKeyDict -from .common import AbstractWritableDataStore, ArrayWriter, BackendArray +from .common import AbstractWritableDataStore, BackendArray # need some special secret attributes to tell us the dimensions _DIMENSION_KEY = '_ARRAY_DIMENSIONS' @@ -237,7 +237,8 @@ def open_group(cls, store, mode='r', synchronizer=None, group=None, "#installation" % min_zarr) if consolidated or consolidate_on_close: - if LooseVersion(zarr.__version__) <= '2.2.1.dev2': # pragma: no cover + if LooseVersion( + zarr.__version__) <= '2.2.1.dev2': # pragma: no cover raise NotImplementedError("Zarr version 2.2.1.dev2 or greater " "is required by for consolidated " "metadata.") diff --git a/xarray/coding/cftime_offsets.py b/xarray/coding/cftime_offsets.py index 9cdd74537d8..144b0fba9e1 100644 --- a/xarray/coding/cftime_offsets.py +++ b/xarray/coding/cftime_offsets.py @@ -419,7 +419,7 @@ def __apply__(self, other): _FREQUENCY_CONDITION = '|'.join(_FREQUENCIES.keys()) -_PATTERN = '^((?P<multiple>\d+)|())(?P<freq>({0}))$'.format( +_PATTERN = r'^((?P<multiple>\d+)|())(?P<freq>({0}))$'.format( _FREQUENCY_CONDITION) @@ -726,10 +726,10 @@ def cftime_range(start=None, end=None, periods=None, freq='D', raise ValueError("Closed must be either 'left', 'right' or None") if (not left_closed and len(dates) and - start is not None and dates[0] == start): + start is not None and dates[0] == start): dates = dates[1:] if (not right_closed and len(dates) and - end is not None and dates[-1] == end): + end is not None and dates[-1] == end): dates = dates[:-1] return CFTimeIndex(dates, name=name) diff --git a/xarray/coding/cftimeindex.py b/xarray/coding/cftimeindex.py index 98954e9af0c..82281b0d849 100644 --- a/xarray/coding/cftimeindex.py +++ b/xarray/coding/cftimeindex.py @@ -51,7 +51,7 @@ from xarray.core import pycompat from xarray.core.utils import is_scalar -from .times import cftime_to_nptime, infer_calendar_name, _STANDARD_CALENDARS +from .times import
_STANDARD_CALENDARS, cftime_to_nptime, infer_calendar_name def named(name, pattern): @@ -68,13 +68,13 @@ def trailing_optional(xs): return xs[0] + optional(trailing_optional(xs[1:])) -def build_pattern(date_sep='\-', datetime_sep='T', time_sep='\:'): - pieces = [(None, 'year', '\d{4}'), - (date_sep, 'month', '\d{2}'), - (date_sep, 'day', '\d{2}'), - (datetime_sep, 'hour', '\d{2}'), - (time_sep, 'minute', '\d{2}'), - (time_sep, 'second', '\d{2}')] +def build_pattern(date_sep=r'\-', datetime_sep=r'T', time_sep=r'\:'): + pieces = [(None, 'year', r'\d{4}'), + (date_sep, 'month', r'\d{2}'), + (date_sep, 'day', r'\d{2}'), + (datetime_sep, 'hour', r'\d{2}'), + (time_sep, 'minute', r'\d{2}'), + (time_sep, 'second', r'\d{2}')] pattern_list = [] for sep, name, sub_pattern in pieces: pattern_list.append((sep if sep else '') + named(name, sub_pattern)) @@ -152,6 +152,7 @@ def get_date_field(datetimes, field): def _field_accessor(name, docstring=None): """Adapted from pandas.tseries.index._field_accessor""" + def f(self): return get_date_field(self._data, name) diff --git a/xarray/core/combine.py b/xarray/core/combine.py index e34bb05b3c1..2023c9ea30d 100644 --- a/xarray/core/combine.py +++ b/xarray/core/combine.py @@ -1,7 +1,7 @@ from __future__ import absolute_import, division, print_function -import warnings import itertools +import warnings from collections import Counter import pandas as pd @@ -378,7 +378,7 @@ def _infer_concat_order_from_positions(datasets, concat_dims): tile_id, ds = list(combined_ids.items())[0] n_dims = len(tile_id) if concat_dims == _CONCAT_DIM_DEFAULT or concat_dims is None: - concat_dims = [concat_dims]*n_dims + concat_dims = [concat_dims] * n_dims else: if len(concat_dims) != n_dims: raise ValueError("concat_dims has length {} but the datasets " @@ -533,8 +533,8 @@ def _auto_combine(datasets, concat_dims, compat, data_vars, coords, if not ids: # Determine tile_IDs by structure of input in N-D # (i.e. ordering in list-of-lists) - combined_ids, concat_dims = _infer_concat_order_from_positions\ (datasets, concat_dims) + combined_ids, concat_dims = _infer_concat_order_from_positions( + datasets, concat_dims) else: # Already sorted so just use the ids already passed combined_ids = OrderedDict(zip(ids, datasets)) diff --git a/xarray/core/common.py b/xarray/core/common.py index c0a0201c7ce..5b090bf0d2f 100644 --- a/xarray/core/common.py +++ b/xarray/core/common.py @@ -1,7 +1,5 @@ from __future__ import absolute_import, division, print_function -import warnings -from distutils.version import LooseVersion from textwrap import dedent import numpy as np @@ -9,9 +7,9 @@ from . import dtypes, duck_array_ops, formatting, ops from .arithmetic import SupportsArithmetic +from .options import _get_keep_attrs from .pycompat import OrderedDict, basestring, dask_array_type, suppress from .utils import Frozen, ReprObject, SortedKeysDict, either_dict_or_kwargs -from .options import _get_keep_attrs # Used as a sentinel value to indicate a all dimensions ALL_DIMS = ReprObject('<all-dims>') @@ -96,7 +94,7 @@ def __complex__(self): return complex(self.values) def __long__(self): - return long(self.values) # flake8: noqa + return long(self.values) # noqa def __array__(self, dtype=None): return np.asarray(self.values, dtype=dtype) @@ -208,7 +206,7 @@ def _ipython_key_completions_(self): """Provide method for the key-autocompletions in IPython. See http://ipython.readthedocs.io/en/stable/config/integrating.html#tab-completion For the details.
- """ + """ # noqa item_lists = [item for sublist in self._item_sources for item in sublist @@ -426,7 +424,8 @@ def pipe(self, func, *args, **kwargs): if isinstance(func, tuple): func, target = func if target in kwargs: - msg = '%s is both the pipe target and a keyword argument' % target + msg = ('%s is both the pipe target and a keyword argument' + % target) raise ValueError(msg) kwargs[target] = self return func(*args, **kwargs) @@ -476,7 +475,7 @@ def groupby(self, group, squeeze=True): -------- core.groupby.DataArrayGroupBy core.groupby.DatasetGroupBy - """ + """ # noqa return self._groupby_cls(self, group, squeeze=squeeze) def groupby_bins(self, group, bins, right=True, labels=None, precision=3, @@ -525,7 +524,7 @@ def groupby_bins(self, group, bins, right=True, labels=None, precision=3, References ---------- .. [1] http://pandas.pydata.org/pandas-docs/stable/generated/pandas.cut.html - """ + """ # noqa return self._groupby_cls(self, group, squeeze=squeeze, bins=bins, cut_kwargs={'right': right, 'labels': labels, 'precision': precision, @@ -586,7 +585,7 @@ def rolling(self, dim=None, min_periods=None, center=False, **dim_kwargs): -------- core.rolling.DataArrayRolling core.rolling.DatasetRolling - """ + """ # noqa dim = either_dict_or_kwargs(dim, dim_kwargs, 'rolling') return self._rolling_cls(self, dim, min_periods=min_periods, center=center) @@ -659,7 +658,7 @@ def resample(self, indexer=None, skipna=None, closed=None, label=None, ---------- .. [1] http://pandas.pydata.org/pandas-docs/stable/timeseries.html#offset-aliases - """ + """ # noqa # TODO support non-string indexer after removing the old API. from .dataarray import DataArray @@ -673,10 +672,11 @@ def resample(self, indexer=None, skipna=None, closed=None, label=None, if ((skipna is not None and not isinstance(skipna, bool)) or ('how' in indexer_kwargs and 'how' not in self.dims) or ('dim' in indexer_kwargs and 'dim' not in self.dims)): - raise TypeError('resample() no longer supports the `how` or ' - '`dim` arguments. Instead call methods on resample ' - "objects, e.g., data.resample(time='1D').mean()") - + raise TypeError( + 'resample() no longer supports the `how` or ' + '`dim` arguments. Instead call methods on resample ' + "objects, e.g., data.resample(time='1D').mean()") + indexer = either_dict_or_kwargs(indexer, indexer_kwargs, 'resample') if len(indexer) != 1: diff --git a/xarray/core/dataarray.py b/xarray/core/dataarray.py index e04648bd0b3..e8531a62f4f 100644 --- a/xarray/core/dataarray.py +++ b/xarray/core/dataarray.py @@ -16,7 +16,7 @@ assert_coordinate_consistent, remap_label_indexers) from .dataset import Dataset, merge_indexes, split_indexes from .formatting import format_item -from .options import OPTIONS, _get_keep_attrs +from .options import OPTIONS from .pycompat import OrderedDict, basestring, iteritems, range, zip from .utils import ( _check_inplace, decode_numpy_dict_values, either_dict_or_kwargs, diff --git a/xarray/core/dataset.py b/xarray/core/dataset.py index 686ffb37adc..7ac3b458232 100644 --- a/xarray/core/dataset.py +++ b/xarray/core/dataset.py @@ -13,9 +13,8 @@ import xarray as xr from . import ( - alignment, computation, duck_array_ops, formatting, groupby, indexing, ops, - pdcompat, resample, rolling, utils) -from .. 
import conventions + alignment, duck_array_ops, formatting, groupby, indexing, ops, pdcompat, + resample, rolling, utils) from ..coding.cftimeindex import _parse_array_of_cftime_strings from .alignment import align from .common import ( @@ -24,15 +23,14 @@ from .coordinates import ( DatasetCoordinates, Indexes, LevelCoordinatesSource, assert_coordinate_consistent, remap_label_indexers) -from .dtypes import is_datetime_like from .merge import ( dataset_merge_method, dataset_update_method, merge_data_and_coords, merge_variables) from .options import OPTIONS, _get_keep_attrs from .pycompat import ( - OrderedDict, basestring, dask_array_type, integer_types, iteritems, range) + OrderedDict, basestring, dask_array_type, iteritems, range) from .utils import ( - _check_inplace, Frozen, SortedKeysDict, datetime_to_numeric, + Frozen, SortedKeysDict, _check_inplace, datetime_to_numeric, decode_numpy_dict_values, either_dict_or_kwargs, ensure_us_time_resolution, hashable, maybe_wrap_array) from .variable import IndexVariable, Variable, as_variable, broadcast_variables @@ -516,7 +514,6 @@ def __dask_graph__(self): from dask import sharedict return sharedict.merge(*graphs.values()) - def __dask_keys__(self): import dask return [v.__dask_keys__() for v in self.variables.values() @@ -814,7 +811,7 @@ def copy(self, deep=False, data=None): See Also -------- pandas.DataFrame.copy - """ + """ # noqa if data is None: variables = OrderedDict((k, v.copy(deep=deep)) for k, v in iteritems(self._variables)) @@ -825,14 +822,16 @@ def copy(self, deep=False, data=None): data_keys = set(data.keys()) keys_not_in_vars = data_keys - var_keys if keys_not_in_vars: - raise ValueError('Data must only contain variables in original ' - 'dataset. Extra variables: {}' - .format(keys_not_in_vars)) + raise ValueError( + 'Data must only contain variables in original ' + 'dataset. Extra variables: {}' + .format(keys_not_in_vars)) keys_missing_from_data = var_keys - data_keys if keys_missing_from_data: - raise ValueError('Data must contain all variables in original ' - 'dataset. Data is missing {}' - .format(keys_missing_from_data)) + raise ValueError( + 'Data must contain all variables in original ' + 'dataset. Data is missing {}' + .format(keys_missing_from_data)) variables = OrderedDict((k, v.copy(deep=deep, data=data.get(k))) for k, v in iteritems(self._variables)) @@ -1175,7 +1174,8 @@ def to_netcdf(self, path=None, mode='w', format=None, group=None, Write ('w') or append ('a') mode. If mode='w', any existing file at this location will be overwritten. If mode='a', existing variables will be overwritten. 
- format : {'NETCDF4', 'NETCDF4_CLASSIC', 'NETCDF3_64BIT','NETCDF3_CLASSIC'}, optional + format : {'NETCDF4', 'NETCDF4_CLASSIC', 'NETCDF3_64BIT', + 'NETCDF3_CLASSIC'}, optional File format for the resulting netCDF file: * NETCDF4: Data is stored in an HDF5 file, using netCDF4 API @@ -1362,7 +1362,8 @@ def chunk(self, chunks=None, name_prefix='xarray-', token=None, try: from dask.base import tokenize except ImportError: - import dask # raise the usual error if dask is entirely missing # flake8: noqa + # raise the usual error if dask is entirely missing + import dask # noqa raise ImportError('xarray requires dask version 0.9 or newer') if isinstance(chunks, Number): @@ -1647,7 +1648,7 @@ def isel_points(self, dim='points', **indexers): Dataset.isel Dataset.sel_points DataArray.isel_points - """ + """ # noqa warnings.warn('Dataset.isel_points is deprecated: use Dataset.isel()' 'instead.', DeprecationWarning, stacklevel=2) @@ -1798,7 +1799,7 @@ def sel_points(self, dim='points', method=None, tolerance=None, Dataset.isel Dataset.isel_points DataArray.sel_points - """ + """ # noqa warnings.warn('Dataset.sel_points is deprecated: use Dataset.sel()' 'instead.', DeprecationWarning, stacklevel=2) @@ -1863,9 +1864,9 @@ def reindex(self, indexers=None, method=None, tolerance=None, copy=True, ---------- indexers : dict. optional Dictionary with keys given by dimension names and values given by - arrays of coordinates tick labels. Any mis-matched coordinate values - will be filled in with NaN, and any mis-matched dimension names will - simply be ignored. + arrays of coordinates tick labels. Any mis-matched coordinate + values will be filled in with NaN, and any mis-matched dimension + names will simply be ignored. One of indexers or indexers_kwargs must be provided. method : {None, 'nearest', 'pad'/'ffill', 'backfill'/'bfill'}, optional Method to use for filling index values in ``indexers`` not found in @@ -1973,12 +1974,12 @@ def _validate_interp_indexer(x, new_x): # used with interp are stronger than those which are placed on # isel, so we need an additional check after _validate_indexers. if (_contains_datetime_like_objects(x) and - not _contains_datetime_like_objects(new_x)): - raise TypeError('When interpolating over a datetime-like ' - 'coordinate, the coordinates to ' - 'interpolate to must be either datetime ' - 'strings or datetimes. ' - 'Instead got\n{}'.format(new_x)) + not _contains_datetime_like_objects(new_x)): + raise TypeError('When interpolating over a datetime-like ' + 'coordinate, the coordinates to ' + 'interpolate to must be either datetime ' + 'strings or datetimes. 
' + 'Instead got\n{}'.format(new_x)) else: return (x, new_x) @@ -1988,7 +1989,7 @@ def _validate_interp_indexer(x, new_x): if var.dtype.kind in 'uifc': var_indexers = {k: _validate_interp_indexer( maybe_variable(obj, k), v) for k, v - in indexers.items() if k in var.dims} + in indexers.items() if k in var.dims} variables[name] = missing.interp( var, var_indexers, method, **kwargs) elif all(d not in indexers for d in var.dims): @@ -2911,7 +2912,7 @@ def reduce(self, func, dim=None, keep_attrs=None, numeric_only=False, else: dims = set(dim) - missing_dimensions = [dim for dim in dims if dim not in self.dims] + missing_dimensions = [d for d in dims if d not in self.dims] if missing_dimensions: raise ValueError('Dataset does not contain the dimensions: %s' % missing_dimensions) @@ -2921,7 +2922,7 @@ def reduce(self, func, dim=None, keep_attrs=None, numeric_only=False, variables = OrderedDict() for name, var in iteritems(self._variables): - reduce_dims = [dim for dim in var.dims if dim in dims] + reduce_dims = [d for d in var.dims if d in dims] if name in self.coords: if not reduce_dims: variables[name] = var @@ -2988,7 +2989,7 @@ def apply(self, func, keep_attrs=None, args=(), **kwargs): Data variables: foo (dim_0, dim_1) float64 0.3751 1.951 1.945 0.2948 0.711 0.3948 bar (x) float64 1.0 2.0 - """ + """ # noqa variables = OrderedDict( (k, maybe_wrap_array(v, func(v, *args, **kwargs))) for k, v in iteritems(self.data_vars)) @@ -3030,7 +3031,8 @@ def assign(self, variables=None, **variables_kwargs): -------- pandas.DataFrame.assign """ - variables = either_dict_or_kwargs(variables, variables_kwargs, 'assign') + variables = either_dict_or_kwargs( + variables, variables_kwargs, 'assign') data = self.copy() # do all calculations first... results = data._calc_assign_results(variables) @@ -3709,14 +3711,14 @@ def quantile(self, q, dim=None, interpolation='linear', else: dims = set(dim) - _assert_empty([dim for dim in dims if dim not in self.dims], + _assert_empty([d for d in dims if d not in self.dims], 'Dataset does not contain the dimensions: %s') q = np.asarray(q, dtype=np.float64) variables = OrderedDict() for name, var in iteritems(self.variables): - reduce_dims = [dim for dim in var.dims if dim in dims] + reduce_dims = [d for d in var.dims if d in dims] if reduce_dims or not var.dims: if name not in self.coords: if (not numeric_only or @@ -3749,7 +3751,8 @@ def rank(self, dim, pct=False, keep_attrs=None): """Ranks the data. Equal values are assigned a rank that is the average of the ranks that - would have been otherwise assigned to all of the values within that set. + would have been otherwise assigned to all of the values within + that set. Ranks begin at 1, not 0. If pct is True, computes percentage ranks. NaNs in the input array are returned as NaNs. @@ -3832,7 +3835,8 @@ def differentiate(self, coord, edge_order=1, datetime_unit=None): datetime_unit, _ = np.datetime_data(coord_var.dtype) elif datetime_unit is None: datetime_unit = 's' # Default to seconds for cftime objects - coord_var = datetime_to_numeric(coord_var, datetime_unit=datetime_unit) + coord_var = datetime_to_numeric( + coord_var, datetime_unit=datetime_unit) variables = OrderedDict() for k, v in self.variables.items(): @@ -3935,7 +3939,7 @@ def filter_by_attrs(self, **kwargs): temperature (x, y, time) float64 25.86 20.82 6.954 23.13 10.25 11.68 ... precipitation (x, y, time) float64 5.702 0.9422 2.075 1.178 3.284 ... 
- """ + """ # noqa selection = [] for var_name, variable in self.data_vars.items(): has_value_flag = False diff --git a/xarray/core/groupby.py b/xarray/core/groupby.py index 9e8c1e6bbc3..ec8329d6805 100644 --- a/xarray/core/groupby.py +++ b/xarray/core/groupby.py @@ -1,9 +1,9 @@ from __future__ import absolute_import, division, print_function +import datetime import functools import warnings -import datetime import numpy as np import pandas as pd @@ -11,10 +11,10 @@ from .arithmetic import SupportsArithmetic from .combine import concat from .common import ALL_DIMS, ImplementsArrayReduce, ImplementsDatasetReduce +from .options import _get_keep_attrs from .pycompat import integer_types, range, zip from .utils import hashable, maybe_wrap_array, peek_at, safe_cast_to_index from .variable import IndexVariable, Variable, as_variable -from .options import _get_keep_attrs def unique_value_groups(ar, sort=True): diff --git a/xarray/core/options.py b/xarray/core/options.py index 400508a5d59..db8e696eedf 100644 --- a/xarray/core/options.py +++ b/xarray/core/options.py @@ -66,8 +66,9 @@ def _get_keep_attrs(default): elif global_choice in [True, False]: return global_choice else: - raise ValueError("The global option keep_attrs must be one of True, " - "False or 'default'.") + raise ValueError( + "The global option keep_attrs must be one of" + " True, False or 'default'.") class set_options(object): diff --git a/xarray/core/utils.py b/xarray/core/utils.py index 50d6ec7e05a..fbda658c23f 100644 --- a/xarray/core/utils.py +++ b/xarray/core/utils.py @@ -508,7 +508,7 @@ def close_on_error(f): def is_remote_uri(path): - return bool(re.search('^https?\://', path)) + return bool(re.search(r'^https?\://', path)) def is_grib_path(path): diff --git a/xarray/core/variable.py b/xarray/core/variable.py index 469e8741a29..cabab259446 100644 --- a/xarray/core/variable.py +++ b/xarray/core/variable.py @@ -15,10 +15,10 @@ from .indexing import ( BasicIndexer, OuterIndexer, PandasIndexAdapter, VectorizedIndexer, as_indexable) +from .options import _get_keep_attrs from .pycompat import ( OrderedDict, basestring, dask_array_type, integer_types, zip) from .utils import OrderedSet, either_dict_or_kwargs -from .options import _get_keep_attrs try: import dask.array as da diff --git a/xarray/plot/plot.py b/xarray/plot/plot.py index 8d21e084946..1f7b8d8587a 100644 --- a/xarray/plot/plot.py +++ b/xarray/plot/plot.py @@ -14,7 +14,6 @@ import numpy as np import pandas as pd -from xarray.core.alignment import align from xarray.core.common import contains_cftime_datetimes from xarray.core.pycompat import basestring @@ -255,7 +254,6 @@ def _infer_line_data(darray, x, y, hue): huelabel = label_from_attrs(darray[huename]) hueplt = darray[huename] - xlabel = label_from_attrs(xplt) ylabel = label_from_attrs(yplt) diff --git a/xarray/testing.py b/xarray/testing.py index 03c5354cdff..c2bb5044ef4 100644 --- a/xarray/testing.py +++ b/xarray/testing.py @@ -145,4 +145,3 @@ def assert_combined_tile_ids_equal(dict1, dict2): for k, v in dict1.items(): assert k in dict2.keys() assert_equal(dict1[k], dict2[k]) - diff --git a/xarray/tests/test_backends.py b/xarray/tests/test_backends.py index f4d9154eadb..993db79a66e 100644 --- a/xarray/tests/test_backends.py +++ b/xarray/tests/test_backends.py @@ -23,17 +23,18 @@ from xarray.backends.netCDF4_ import _extract_nc4_variable_encoding from xarray.backends.pydap_ import PydapDataStore from xarray.core import indexing +from xarray.core.options import set_options from xarray.core.pycompat import ( 
ExitStack, basestring, dask_array_type, iteritems) -from xarray.core.options import set_options from xarray.tests import mock from . import ( assert_allclose, assert_array_equal, assert_equal, assert_identical, - has_dask, has_netCDF4, has_scipy, network, raises_regex, requires_cftime, - requires_dask, requires_h5netcdf, requires_netCDF4, requires_pathlib, - requires_pseudonetcdf, requires_pydap, requires_pynio, requires_rasterio, - requires_scipy, requires_scipy_or_netCDF4, requires_zarr, requires_cfgrib) + has_dask, has_netCDF4, has_scipy, network, raises_regex, requires_cfgrib, + requires_cftime, requires_dask, requires_h5netcdf, requires_netCDF4, + requires_pathlib, requires_pseudonetcdf, requires_pydap, requires_pynio, + requires_rasterio, requires_scipy, requires_scipy_or_netCDF4, + requires_zarr) from .test_dataset import create_test_data try: @@ -662,7 +663,8 @@ def test_roundtrip_string_with_fill_value_nchar(self): create_encoded_unsigned_masked_scaled_data), pytest.param(create_bad_unsigned_masked_scaled_data, create_bad_encoded_unsigned_masked_scaled_data, - marks=pytest.mark.xfail(reason="Bad _Unsigned attribute.")), + marks=pytest.mark.xfail( + reason="Bad _Unsigned attribute.")), (create_signed_masked_scaled_data, create_encoded_signed_masked_scaled_data), (create_masked_and_scaled_data, @@ -1348,7 +1350,7 @@ def roundtrip_append(self, data, save_kwargs={}, open_kwargs={}, pytest.skip("zarr backend does not support appending") def test_roundtrip_consolidated(self): - zarr = pytest.importorskip('zarr', minversion="2.2.1.dev2") + pytest.importorskip('zarr', minversion="2.2.1.dev2") expected = create_test_data() with self.roundtrip(expected, save_kwargs={'consolidated': True}, @@ -2168,7 +2170,6 @@ def test_open_mfdataset(self): with open_mfdataset([tmp1, tmp2], chunks={'x': 3}) as actual: assert actual.foo.variable.data.chunks == ((3, 2, 3, 2),) - with raises_regex(IOError, 'no files to open'): open_mfdataset('foo-bar-baz-*.nc') @@ -2195,14 +2196,14 @@ def test_open_mfdataset_2d(self): assert isinstance(actual.foo.variable.data, da.Array) assert actual.foo.variable.data.chunks == \ - ((5, 5), (4, 4)) + ((5, 5), (4, 4)) assert_identical(original, actual) with open_mfdataset([[tmp1, tmp2], [tmp3, tmp4]], concat_dim=['y', 'x'], chunks={'x': 3, 'y': 2}) as actual: assert actual.foo.variable.data.chunks == \ - ((3, 2, 3, 2), (2, 2, 2, 2),) + ((3, 2, 3, 2), (2, 2, 2, 2),) @requires_pathlib def test_open_mfdataset_pathlib(self): @@ -2241,7 +2242,7 @@ def test_open_mfdataset_2d_pathlib(self): assert_identical(original, actual) @pytest.mark.xfail(reason="Not yet implemented") - def test_open_mfdataset(self): + def test_open_mfdataset_2(self): original = Dataset({'foo': ('x', np.random.randn(10))}) with create_tmp_file() as tmp1: with create_tmp_file() as tmp2: @@ -2963,7 +2964,8 @@ def test_indexing(self): assert_allclose(expected.isel(**ind), actual.isel(**ind)) assert not actual.variable._in_memory - ind = {'band': 0, 'x': np.array([0, 0]), 'y': np.array([1, 1, 1])} + ind = {'band': 0, 'x': np.array( + [0, 0]), 'y': np.array([1, 1, 1])} assert_allclose(expected.isel(**ind), actual.isel(**ind)) assert not actual.variable._in_memory diff --git a/xarray/tests/test_backends_api.py b/xarray/tests/test_backends_api.py index ed49dd721d2..2b025db8cab 100644 --- a/xarray/tests/test_backends_api.py +++ b/xarray/tests/test_backends_api.py @@ -2,6 +2,7 @@ import pytest from xarray.backends.api import _get_default_engine + from . 
import requires_netCDF4, requires_scipy diff --git a/xarray/tests/test_backends_file_manager.py b/xarray/tests/test_backends_file_manager.py index 3b618f35ea7..9c4c1cf815c 100644 --- a/xarray/tests/test_backends_file_manager.py +++ b/xarray/tests/test_backends_file_manager.py @@ -1,11 +1,6 @@ -import collections import gc import pickle import threading -try: - from unittest import mock -except ImportError: - import mock # noqa: F401 import pytest @@ -13,6 +8,11 @@ from xarray.backends.lru_cache import LRUCache from xarray.core.options import set_options +try: + from unittest import mock +except ImportError: + import mock # noqa: F401 + @pytest.fixture(params=[1, 2, 3, None]) def file_cache(request): diff --git a/xarray/tests/test_cftimeindex.py b/xarray/tests/test_cftimeindex.py index ea41115937b..4c91bbd6195 100644 --- a/xarray/tests/test_cftimeindex.py +++ b/xarray/tests/test_cftimeindex.py @@ -12,9 +12,9 @@ _parsed_string_to_bounds, assert_all_valid_date_type, parse_iso8601) from xarray.tests import assert_array_equal, assert_identical -from . import has_cftime, has_cftime_or_netCDF4, requires_cftime, raises_regex -from .test_coding_times import (_all_cftime_date_types, _ALL_CALENDARS, - _NON_STANDARD_CALENDARS) +from . import has_cftime, has_cftime_or_netCDF4, raises_regex, requires_cftime +from .test_coding_times import ( + _ALL_CALENDARS, _NON_STANDARD_CALENDARS, _all_cftime_date_types) def date_dict(year=None, month=None, day=None, diff --git a/xarray/tests/test_coding_times.py b/xarray/tests/test_coding_times.py index b7db2b43cab..d9a40c23add 100644 --- a/xarray/tests/test_coding_times.py +++ b/xarray/tests/test_coding_times.py @@ -8,8 +8,8 @@ import pytest from xarray import DataArray, Variable, coding, decode_cf -from xarray.coding.times import (_import_cftime, cftime_to_nptime, - decode_cf_datetime, encode_cf_datetime) +from xarray.coding.times import ( + _import_cftime, cftime_to_nptime, decode_cf_datetime, encode_cf_datetime) from xarray.conventions import _update_bounds_attributes from xarray.core.common import contains_cftime_datetimes diff --git a/xarray/tests/test_combine.py b/xarray/tests/test_combine.py index ec2288b1d2d..9ea38e7d5f2 100644 --- a/xarray/tests/test_combine.py +++ b/xarray/tests/test_combine.py @@ -7,16 +7,16 @@ import pandas as pd import pytest -from xarray import DataArray, Dataset, Variable, auto_combine, concat, merge -from xarray.core.pycompat import OrderedDict, iteritems +from xarray import DataArray, Dataset, Variable, auto_combine, concat from xarray.core.combine import ( - _new_tile_id, _auto_combine_all_along_first_dim, - _infer_concat_order_from_positions, _infer_tile_ids_from_nested_list, - _check_shape_tile_ids, _combine_nd, _auto_combine_1d, _auto_combine) + _auto_combine, _auto_combine_1d, _auto_combine_all_along_first_dim, + _check_shape_tile_ids, _combine_nd, _infer_concat_order_from_positions, + _infer_tile_ids_from_nested_list, _new_tile_id) +from xarray.core.pycompat import OrderedDict, iteritems from . 
import ( - InaccessibleArray, assert_array_equal, assert_equal, assert_identical, - assert_combined_tile_ids_equal, raises_regex, requires_dask) + InaccessibleArray, assert_array_equal, assert_combined_tile_ids_equal, + assert_equal, assert_identical, raises_regex, requires_dask) from .test_dataset import create_test_data @@ -492,8 +492,8 @@ def test_infer_from_datasets(self): input = [ds(0), ds(1)] expected = {(0,): ds(0), (1,): ds(1)} - actual, concat_dims = _infer_concat_order_from_positions\ - (input, ['dim1']) + actual, concat_dims = _infer_concat_order_from_positions(input, [ + 'dim1']) assert_combined_tile_ids_equal(expected, actual) input = [ds(0), ds(1)] @@ -520,11 +520,11 @@ def _create_tile_ids(shape): @requires_dask # only for toolz class TestCombineND(object): - @pytest.mark.parametrize("old_id, new_id", [((3,0,1), (0,1)), - ((0, 0), (0,)), - ((1,), ()), - ((0,), ()), - ((1, 0), (0,))]) + @pytest.mark.parametrize("old_id, new_id", [((3, 0, 1), (0, 1)), + ((0, 0), (0,)), + ((1,), ()), + ((0,), ()), + ((1, 0), (0,))]) def test_new_tile_id(self, old_id, new_id): ds = create_test_data assert _new_tile_id((old_id, ds)) == new_id @@ -594,7 +594,7 @@ def test_check_depths(self): def test_check_lengths(self): ds = create_test_data(0) - combined_tile_ids = {(0, 0): ds, (0, 1): ds , (0, 2): ds, + combined_tile_ids = {(0, 0): ds, (0, 1): ds, (0, 2): ds, (1, 0): ds, (1, 1): ds} with raises_regex(ValueError, 'sub-lists do not have ' 'consistent lengths'): @@ -643,8 +643,10 @@ def test_invalid_hypercube_input(self): auto_combine(datasets, concat_dim=['dim1']) def test_merge_one_dim_concat_another(self): - objs = [[Dataset({'foo': ('x', [0, 1])}), Dataset({'bar': ('x', [10, 20])})], - [Dataset({'foo': ('x', [2, 3])}), Dataset({'bar': ('x', [30, 40])})]] + objs = [[Dataset({'foo': ('x', [0, 1])}), + Dataset({'bar': ('x', [10, 20])})], + [Dataset({'foo': ('x', [2, 3])}), + Dataset({'bar': ('x', [30, 40])})]] expected = Dataset({'foo': ('x', [0, 1, 2, 3]), 'bar': ('x', [10, 20, 30, 40])}) @@ -655,8 +657,10 @@ def test_merge_one_dim_concat_another(self): assert_identical(expected, actual) # Proving it works symmetrically - objs = [[Dataset({'foo': ('x', [0, 1])}), Dataset({'foo': ('x', [2, 3])})], - [Dataset({'bar': ('x', [10, 20])}), Dataset({'bar': ('x', [30, 40])})]] + objs = [[Dataset({'foo': ('x', [0, 1])}), + Dataset({'foo': ('x', [2, 3])})], + [Dataset({'bar': ('x', [10, 20])}), + Dataset({'bar': ('x', [30, 40])})]] actual = auto_combine(objs, concat_dim=[None, 'x']) assert_identical(expected, actual) diff --git a/xarray/tests/test_dataarray.py b/xarray/tests/test_dataarray.py index fe05da16ad0..5468905a320 100644 --- a/xarray/tests/test_dataarray.py +++ b/xarray/tests/test_dataarray.py @@ -11,7 +11,7 @@ import xarray as xr from xarray import ( - DataArray, Dataset, IndexVariable, Variable, align, broadcast, set_options) + DataArray, Dataset, IndexVariable, Variable, align, broadcast) from xarray.coding.times import CFDatetimeCoder, _import_cftime from xarray.convert import from_cdms2 from xarray.core.common import ALL_DIMS, full_like diff --git a/xarray/tests/test_distributed.py b/xarray/tests/test_distributed.py index bd62b8d906d..3a79d40c226 100644 --- a/xarray/tests/test_distributed.py +++ b/xarray/tests/test_distributed.py @@ -1,22 +1,17 @@ """ isort:skip_file """ from __future__ import absolute_import, division, print_function -from distutils.version import LooseVersion -import os -import sys import pickle -import tempfile import pytest dask = pytest.importorskip('dask', 
minversion='0.18') # isort:skip -distributed = pytest.importorskip('distributed', minversion='1.21') # isort:skip +distributed = pytest.importorskip( + 'distributed', minversion='1.21') # isort:skip -from dask import array from dask.distributed import Client, Lock from distributed.utils_test import cluster, gen_cluster -from distributed.utils_test import loop # flake8: noqa +from distributed.utils_test import loop # noqa from distributed.client import futures_of -import numpy as np import xarray as xr from xarray.backends.locks import HDF5_LOCK, CombinedLock @@ -27,7 +22,7 @@ from . import ( assert_allclose, has_h5netcdf, has_netCDF4, requires_rasterio, has_scipy, - requires_zarr, requires_cfgrib, raises_regex) + requires_zarr, requires_cfgrib) # this is to stop isort throwing errors. May have been easier to just use # `isort:skip` in retrospect @@ -63,7 +58,7 @@ def tmp_netcdf_filename(tmpdir): ] -@pytest.mark.parametrize('engine,nc_format', ENGINES_AND_FORMATS) +@pytest.mark.parametrize('engine,nc_format', ENGINES_AND_FORMATS) # noqa def test_dask_distributed_netcdf_roundtrip( loop, tmp_netcdf_filename, engine, nc_format): @@ -73,7 +68,7 @@ def test_dask_distributed_netcdf_roundtrip( chunks = {'dim1': 4, 'dim2': 3, 'dim3': 6} with cluster() as (s, [a, b]): - with Client(s['address'], loop=loop) as c: + with Client(s['address'], loop=loop): original = create_test_data().chunk(chunks) @@ -93,7 +88,7 @@ def test_dask_distributed_netcdf_roundtrip( assert_allclose(original, computed) -@pytest.mark.parametrize('engine,nc_format', ENGINES_AND_FORMATS) +@pytest.mark.parametrize('engine,nc_format', ENGINES_AND_FORMATS) # noqa def test_dask_distributed_read_netcdf_integration_test( loop, tmp_netcdf_filename, engine, nc_format): @@ -103,7 +98,7 @@ def test_dask_distributed_read_netcdf_integration_test( chunks = {'dim1': 4, 'dim2': 3, 'dim3': 6} with cluster() as (s, [a, b]): - with Client(s['address'], loop=loop) as c: + with Client(s['address'], loop=loop): original = create_test_data() original.to_netcdf(tmp_netcdf_filename, @@ -117,20 +112,19 @@ def test_dask_distributed_read_netcdf_integration_test( assert_allclose(original, computed) - -@requires_zarr +@requires_zarr # noqa @pytest.mark.parametrize('consolidated', [True, False]) @pytest.mark.parametrize('compute', [True, False]) def test_dask_distributed_zarr_integration_test(loop, consolidated, compute): if consolidated: - zarr = pytest.importorskip('zarr', minversion="2.2.1.dev2") + pytest.importorskip('zarr', minversion="2.2.1.dev2") write_kwargs = dict(consolidated=True) read_kwargs = dict(consolidated=True) else: write_kwargs = read_kwargs = {} chunks = {'dim1': 4, 'dim2': 3, 'dim3': 5} with cluster() as (s, [a, b]): - with Client(s['address'], loop=loop) as c: + with Client(s['address'], loop=loop): original = create_test_data().chunk(chunks) with create_tmp_file(allow_cleanup_failure=ON_WINDOWS, suffix='.zarrc') as filename: @@ -144,21 +138,21 @@ def test_dask_distributed_zarr_integration_test(loop, consolidated, compute): assert_allclose(original, computed) -@requires_rasterio +@requires_rasterio # noqa def test_dask_distributed_rasterio_integration_test(loop): with create_tmp_geotiff() as (tmp_file, expected): with cluster() as (s, [a, b]): - with Client(s['address'], loop=loop) as c: + with Client(s['address'], loop=loop): da_tiff = xr.open_rasterio(tmp_file, chunks={'band': 1}) assert isinstance(da_tiff.data, da.Array) actual = da_tiff.compute() assert_allclose(actual, expected) -@requires_cfgrib +@requires_cfgrib # noqa def
test_dask_distributed_cfgrib_integration_test(loop): with cluster() as (s, [a, b]): - with Client(s['address'], loop=loop) as c: + with Client(s['address'], loop=loop): with open_example_dataset('example.grib', engine='cfgrib', chunks={'time': 1}) as ds: diff --git a/xarray/tests/test_options.py b/xarray/tests/test_options.py index d594e1dcd18..3374ded39f0 100644 --- a/xarray/tests/test_options.py +++ b/xarray/tests/test_options.py @@ -3,10 +3,10 @@ import pytest import xarray -from xarray.core.options import OPTIONS, _get_keep_attrs +from xarray import concat, merge from xarray.backends.file_manager import FILE_CACHE +from xarray.core.options import OPTIONS, _get_keep_attrs from xarray.tests.test_dataset import create_test_data -from xarray import concat, merge def test_invalid_option_raises(): diff --git a/xarray/tests/test_plot.py b/xarray/tests/test_plot.py index 10c4283032d..a2c3adf191f 100644 --- a/xarray/tests/test_plot.py +++ b/xarray/tests/test_plot.py @@ -1161,8 +1161,8 @@ def test_facetgrid_no_cbar_ax(self): a = easy_array((10, 15, 2, 3)) d = DataArray(a, dims=['y', 'x', 'columns', 'rows']) with pytest.raises(ValueError): - g = self.plotfunc(d, x='x', y='y', col='columns', row='rows', - cbar_ax=1) + self.plotfunc(d, x='x', y='y', col='columns', row='rows', + cbar_ax=1) def test_cmap_and_color_both(self): with pytest.raises(ValueError): diff --git a/xarray/tests/test_utils.py b/xarray/tests/test_utils.py index ed07af0d7bb..fc14ae2350a 100644 --- a/xarray/tests/test_utils.py +++ b/xarray/tests/test_utils.py @@ -9,7 +9,6 @@ import xarray as xr from xarray.coding.cftimeindex import CFTimeIndex from xarray.core import duck_array_ops, utils -from xarray.core.options import set_options from xarray.core.pycompat import OrderedDict from xarray.core.utils import either_dict_or_kwargs from xarray.testing import assert_identical diff --git a/xarray/tests/test_variable.py b/xarray/tests/test_variable.py index d98783fe2dd..91bc0e555c0 100644 --- a/xarray/tests/test_variable.py +++ b/xarray/tests/test_variable.py @@ -12,7 +12,7 @@ import pytest import pytz -from xarray import Coordinate, Dataset, IndexVariable, Variable +from xarray import Coordinate, Dataset, IndexVariable, Variable, set_options from xarray.core import indexing from xarray.core.common import full_like, ones_like, zeros_like from xarray.core.indexing import ( @@ -28,8 +28,6 @@ assert_allclose, assert_array_equal, assert_equal, assert_identical, raises_regex, requires_dask, source_ndarray) -from xarray import set_options - class VariableSubclassobjects(object): def test_properties(self):
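A note on the two suppression styles this changeset standardizes: under flake8 3.x semantics, `# flake8: noqa` is a file-level directive (flake8 skips the entire file in which it appears), which is why the vendored versioneer.py gains one at the top, while every trailing occurrence elsewhere is narrowed to a line-level `# noqa` that silences only its own line. Below is a minimal sketch of the line-level form; it is illustrative only and not part of the diff. The function is adapted from `_get_default_engine_gz` in xarray/backends/api.py above, and the explicit `: F401` error-code qualifier is an assumption (the hunks here mostly use a bare `# noqa`).

def _get_default_engine_gz():
    """Pick an engine for .gz files; adapted from xarray/backends/api.py."""
    try:
        # Probe import: scipy is never used by name afterwards, so flake8
        # would flag F401 ("imported but unused") without the trailing noqa.
        import scipy  # noqa: F401
        engine = 'scipy'
    except ImportError:  # pragma: no cover
        raise ValueError('scipy is required for accessing .gz files')
    return engine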