@@ -1,33 +1,32 @@
 from __future__ import absolute_import, division, print_function
 
 import os.path
+import warnings
 from glob import glob
 from io import BytesIO
 from numbers import Number
-import warnings
 
 import numpy as np
 
 from .. import Dataset, backends, conventions
 from ..core import indexing
-from ..core.combine import _infer_concat_order_from_positions, _auto_combine
+from ..core.combine import _auto_combine, _infer_concat_order_from_positions
 from ..core.pycompat import basestring, path_type
-from ..core.utils import close_on_error, is_remote_uri, is_grib_path
+from ..core.utils import close_on_error, is_grib_path, is_remote_uri
 from .common import ArrayWriter
 from .locks import _get_scheduler
 
-
 DATAARRAY_NAME = '__xarray_dataarray_name__'
 DATAARRAY_VARIABLE = '__xarray_dataarray_variable__'
 
 
 def _get_default_engine_remote_uri():
     try:
-        import netCDF4
+        import netCDF4  # noqa
         engine = 'netcdf4'
     except ImportError:  # pragma: no cover
         try:
-            import pydap  # flake8: noqa
+            import pydap  # noqa
             engine = 'pydap'
         except ImportError:
             raise ValueError('netCDF4 or pydap is required for accessing '
@@ -38,12 +37,12 @@ def _get_default_engine_remote_uri():
 def _get_default_engine_grib():
     msgs = []
     try:
-        import Nio  # flake8: noqa
+        import Nio  # noqa
         msgs += ["set engine='pynio' to access GRIB files with PyNIO"]
     except ImportError:  # pragma: no cover
         pass
     try:
-        import cfgrib  # flake8: noqa
+        import cfgrib  # noqa
         msgs += ["set engine='cfgrib' to access GRIB files with cfgrib"]
     except ImportError:  # pragma: no cover
         pass
@@ -56,7 +55,7 @@ def _get_default_engine_grib():
 
 def _get_default_engine_gz():
     try:
-        import scipy  # flake8: noqa
+        import scipy  # noqa
         engine = 'scipy'
     except ImportError:  # pragma: no cover
         raise ValueError('scipy is required for accessing .gz files')
@@ -65,11 +64,11 @@ def _get_default_engine_gz():
 
 def _get_default_engine_netcdf():
     try:
-        import netCDF4  # flake8: noqa
+        import netCDF4  # noqa
         engine = 'netcdf4'
     except ImportError:  # pragma: no cover
         try:
-            import scipy.io.netcdf  # flake8: noqa
+            import scipy.io.netcdf  # noqa
             engine = 'scipy'
         except ImportError:
             raise ValueError('cannot read or write netCDF files without '
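All four _get_default_engine_* helpers touched above share the same try/except import-fallback pattern: import a candidate backend purely to probe that it is installed, and fall through to the next candidate on ImportError. A minimal standalone sketch of the pattern (the helper name _pick_netcdf_engine and the netcdf4-before-scipy preference order here are illustrative, not part of this diff):

    def _pick_netcdf_engine():
        # Try the preferred backend first; the import exists only to
        # probe availability, which is why the imports above carry
        # '# noqa' markers (flake8 would otherwise flag F401, unused import).
        try:
            import netCDF4  # noqa: F401
            return 'netcdf4'
        except ImportError:
            try:
                import scipy.io.netcdf  # noqa: F401
                return 'scipy'
            except ImportError:
                raise ValueError('neither netCDF4 nor scipy is installed')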
@@ -579,7 +578,7 @@ def open_mfdataset(paths, chunks=None, concat_dim=_CONCAT_DIM_DEFAULT,
 
     .. [1] http://xarray.pydata.org/en/stable/dask.html
     .. [2] http://xarray.pydata.org/en/stable/dask.html#chunking-and-performance
-    """
+    """  # noqa
     if isinstance(paths, basestring):
         if is_remote_uri(paths):
             raise ValueError(
@@ -642,11 +641,12 @@ def open_mfdataset(paths, chunks=None, concat_dim=_CONCAT_DIM_DEFAULT,
             # Discard ordering because it should be redone from coordinates
             ids = False
 
-        combined = _auto_combine(datasets, concat_dims=concat_dims,
-                                 compat=compat,
-                                 data_vars=data_vars, coords=coords,
-                                 infer_order_from_coords=infer_order_from_coords,
-                                 ids=ids)
+        combined = _auto_combine(
+            datasets, concat_dims=concat_dims,
+            compat=compat,
+            data_vars=data_vars, coords=coords,
+            infer_order_from_coords=infer_order_from_coords,
+            ids=ids)
     except ValueError:
         for ds in datasets:
             ds.close()
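For context on the _auto_combine call reflowed above: it is the step where open_mfdataset merges the per-file datasets into a single object, closing them all if combining fails. A minimal usage sketch from the caller's side, assuming a directory of netCDF files that share a time dimension (the glob pattern 'data/*.nc' and the dimension name are illustrative):

    import xarray as xr

    # Each matching file is opened lazily; the pieces are then
    # concatenated along the explicitly named dimension.
    ds = xr.open_mfdataset('data/*.nc', concat_dim='time')
    print(ds)
    ds.close()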