From f99b783325e079db5930a21bccfb541564165b17 Mon Sep 17 00:00:00 2001
From: Thomas Nicholas
Date: Wed, 25 Aug 2021 13:32:55 -0400
Subject: [PATCH 1/6] added methods from xarray.core._typed_ops.py to list to map over

---
 datatree/datatree.py | 52 +++++++++++++++++++++++++++++++++++++++++++-
 1 file changed, 51 insertions(+), 1 deletion(-)

diff --git a/datatree/datatree.py b/datatree/datatree.py
index a3df42d1..06bdc660 100644
--- a/datatree/datatree.py
+++ b/datatree/datatree.py
@@ -310,9 +310,59 @@ def imag(self):
     "astype",
 ]
 
+_TYPED_DATASET_OPS_TO_MAP = [
+    "__add__",
+    "__sub__",
+    "__mul__",
+    "__pow__",
+    "__truediv__",
+    "__floordiv__",
+    "__mod__",
+    "__and__",
+    "__xor__",
+    "__or__",
+    "__lt__",
+    "__le__",
+    "__gt__",
+    "__ge__",
+    "__eq__",
+    "__ne__",
+    "__radd__",
+    "__rsub__",
+    "__rmul__",
+    "__rpow__",
+    "__rtruediv__",
+    "__rfloordiv__",
+    "__rmod__",
+    "__rand__",
+    "__rxor__",
+    "__ror__",
+    "__iadd__",
+    "__isub__",
+    "__imul__",
+    "__ipow__",
+    "__itruediv__",
+    "__ifloordiv__",
+    "__imod__",
+    "__iand__",
+    "__ixor__",
+    "__ior__",
+    "__neg__",
+    "__pos__",
+    "__abs__",
+    "__invert__",
+    "round",
+    "argsort",
+    "conj",
+    "conjugate",
+]
 # TODO NUM_BINARY_OPS apparently aren't defined on DatasetArithmetic, and don't appear to be injected anywhere...
 _ARITHMETIC_METHODS_TO_MAP = (
-    REDUCE_METHODS + NAN_REDUCE_METHODS + NAN_CUM_METHODS + ["__array_ufunc__"]
+    REDUCE_METHODS
+    + NAN_REDUCE_METHODS
+    + NAN_CUM_METHODS
+    + _TYPED_DATASET_OPS_TO_MAP
+    + ["__array_ufunc__"]
 )
 
 

From 93827102177e758f00319c20ef5a6f2bc8610d2c Mon Sep 17 00:00:00 2001
From: Thomas Nicholas
Date: Wed, 25 Aug 2021 13:36:10 -0400
Subject: [PATCH 2/6] test ops with non-datatrees acting on datatrees

---
 datatree/tests/test_dataset_api.py | 15 ++++++++++++++-
 1 file changed, 14 insertions(+), 1 deletion(-)

diff --git a/datatree/tests/test_dataset_api.py b/datatree/tests/test_dataset_api.py
index afda3588..b92b452f 100644
--- a/datatree/tests/test_dataset_api.py
+++ b/datatree/tests/test_dataset_api.py
@@ -155,8 +155,21 @@ def test_cum_method(self):
 
 
 class TestOps:
+    def test_binary_op_on_other_class(self):
+        ds1 = xr.Dataset({"a": [5], "b": [3]})
+        ds2 = xr.Dataset({"x": [0.1, 0.2], "y": [10, 20]})
+        dt = DataNode("root", data=ds1)
+        DataNode("subnode", data=ds2, parent=dt)
+
+        expected_root = DataNode("root", data=ds1 * 5)
+        expected_descendant = DataNode("subnode", data=ds2 * 5, parent=expected_root)
+        result = dt * 5
+
+        assert_equal(result.ds, expected_root.ds)
+        assert_equal(result["subnode"].ds, expected_descendant.ds)
+
     @pytest.mark.xfail
-    def test_binary_op(self):
+    def test_binary_op_on_datatree(self):
         ds1 = xr.Dataset({"a": [5], "b": [3]})
         ds2 = xr.Dataset({"x": [0.1, 0.2], "y": [10, 20]})
         dt = DataNode("root", data=ds1)
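[Note on the two patches above: once the double-underscore operators are in the mapped list, arithmetic with a scalar should act on every node's Dataset, which is what `test_binary_op_on_other_class` asserts. A minimal usage sketch under that assumption — only `DataNode`, `.ds` and child access come from the test file; the snippet is illustrative, not part of the series:]

```python
import xarray as xr

from datatree import DataNode

ds = xr.Dataset({"a": ("x", [1, 2, 3])})
dt = DataNode("root", data=ds)
DataNode("child", data=ds * 10, parent=dt)

# With __mul__ mapped over the subtree, multiplying the tree by a scalar
# should multiply every node's Dataset while preserving the tree structure.
doubled = dt * 2

xr.testing.assert_equal(doubled.ds, ds * 2)
xr.testing.assert_equal(doubled["child"].ds, (ds * 10) * 2)
```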
From eea4c1ecb6c32a99775b5ce4b79b2137ada6e135 Mon Sep 17 00:00:00 2001
From: Thomas Nicholas
Date: Thu, 2 Sep 2021 12:03:40 -0400
Subject: [PATCH 3/6] removed the xfails

---
 datatree/tests/test_dataset_api.py | 26 +++++++++++++++++++++++---
 1 file changed, 23 insertions(+), 3 deletions(-)

diff --git a/datatree/tests/test_dataset_api.py b/datatree/tests/test_dataset_api.py
index c0a0be1d..9af541c5 100644
--- a/datatree/tests/test_dataset_api.py
+++ b/datatree/tests/test_dataset_api.py
@@ -5,6 +5,8 @@
 
 from datatree import DataNode
 
+from .test_datatree import assert_tree_equal, create_test_datatree
+
 
 class TestDSProperties:
     def test_properties(self):
@@ -88,7 +90,7 @@ def test_cum_method(self):
 
 
 class TestOps:
-    def test_binary_op_on_other_class(self):
+    def test_binary_op_on_int(self):
         ds1 = xr.Dataset({"a": [5], "b": [3]})
         ds2 = xr.Dataset({"x": [0.1, 0.2], "y": [10, 20]})
         dt = DataNode("root", data=ds1)
@@ -101,7 +103,20 @@ def test_binary_op_on_other_class(self):
         assert_equal(result.ds, expected_root.ds)
         assert_equal(result["subnode"].ds, expected_descendant.ds)
 
-    @pytest.mark.xfail
+    def test_binary_op_on_dataset(self):
+        ds1 = xr.Dataset({"a": [5], "b": [3]})
+        ds2 = xr.Dataset({"x": [0.1, 0.2], "y": [10, 20]})
+        dt = DataNode("root", data=ds1)
+        DataNode("subnode", data=ds2, parent=dt)
+        other_ds = xr.Dataset({"z": ("z", [0.1, 0.2])})
+
+        expected_root = DataNode("root", data=ds1 * other_ds)
+        expected_descendant = DataNode("subnode", data=ds2 * other_ds, parent=expected_root)
+        result = dt * other_ds
+
+        assert_equal(result.ds, expected_root.ds)
+        assert_equal(result["subnode"].ds, expected_descendant.ds)
+
     def test_binary_op_on_datatree(self):
         ds1 = xr.Dataset({"a": [5], "b": [3]})
         ds2 = xr.Dataset({"x": [0.1, 0.2], "y": [10, 20]})
         dt = DataNode("root", data=ds1)
@@ -116,7 +131,6 @@ def test_binary_op_on_datatree(self):
         assert_equal(result["subnode"].ds, expected_descendant.ds)
 
 
-@pytest.mark.xfail
 class TestUFuncs:
     def test_root(self):
         da = xr.DataArray(name="a", data=[1, 2, 3])
@@ -132,3 +146,9 @@ def test_descendants(self):
         expected_ds = np.sin(da.to_dataset())
         result_ds = np.sin(dt)["results"].ds
         assert_equal(result_ds, expected_ds)
+
+    def test_tree(self):
+        dt = create_test_datatree()
+        expected = create_test_datatree(modify=lambda ds: np.sin(ds))
+        result_tree = np.sin(dt)
+        assert_tree_equal(result_tree, expected)
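[Note on the patch above: the new `test_tree` case relies on `__array_ufunc__` being in the mapped arithmetic list, so a numpy ufunc applied to a whole tree should be equivalent to mapping it over every node. A sketch of that equivalence — the `datatree.mapping` import path is assumed from the `from .mapping import map_over_subtree` line elsewhere in this series:]

```python
import numpy as np
import xarray as xr

from datatree import DataNode
from datatree.mapping import map_over_subtree  # import path assumed

ds = xr.Dataset({"a": ("x", [0.0, 1.0, 2.0])})
dt = DataNode("root", data=ds)
DataNode("child", data=ds + 1, parent=dt)

# np.sin dispatches through the mapped __array_ufunc__, so both calls below
# should produce a tree whose nodes hold np.sin of the original Datasets.
via_ufunc = np.sin(dt)
via_mapping = map_over_subtree(np.sin)(dt)

xr.testing.assert_equal(via_ufunc.ds, np.sin(ds))
xr.testing.assert_equal(via_ufunc["child"].ds, via_mapping["child"].ds)
```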
From 9ccea151b4e2fd24aea7f58289dbe8b38c7da330 Mon Sep 17 00:00:00 2001
From: Thomas Nicholas
Date: Thu, 2 Sep 2021 12:04:08 -0400
Subject: [PATCH 4/6] refactored ops out into new file

---
 datatree/datatree.py | 278 ++-----------------------------------------
 datatree/ops.py      | 268 +++++++++++++++++++++++++++++++++++++++++
 2 files changed, 279 insertions(+), 267 deletions(-)
 create mode 100644 datatree/ops.py

diff --git a/datatree/datatree.py b/datatree/datatree.py
index e6a432fa..0bedec3e 100644
--- a/datatree/datatree.py
+++ b/datatree/datatree.py
@@ -5,44 +5,26 @@
 
 import anytree
 from xarray.core import dtypes, utils
-from xarray.core.arithmetic import DatasetArithmetic
 from xarray.core.combine import merge
-from xarray.core.common import DataWithCoords
 from xarray.core.dataarray import DataArray
 from xarray.core.dataset import Dataset
-from xarray.core.ops import NAN_CUM_METHODS, NAN_REDUCE_METHODS, REDUCE_METHODS
 from xarray.core.variable import Variable
 
 from .mapping import map_over_subtree
+from .ops import (
+    DataTreeArithmeticMixin,
+    MappedDatasetMethodsMixin,
+    MappedDataWithCoords,
+)
 from .treenode import PathType, TreeNode, _init_single_treenode
 
 """
-The structure of a populated Datatree looks roughly like this:
-
-DataTree("root name")
-|-- DataNode("weather")
-|   |   Variable("wind_speed")
-|   |   Variable("pressure")
-|   |-- DataNode("temperature")
-|   |   Variable("sea_surface_temperature")
-|   |   Variable("dew_point_temperature")
-|-- DataNode("satellite image")
-|   |   Variable("true_colour")
-|   |-- DataNode("infrared")
-|   |   Variable("near_infrared")
-|   |   Variable("far_infrared")
-|-- DataNode("topography")
-|   |-- DataNode("elevation")
-|   |   Variable("height_above_sea_level")
-|-- DataNode("population")
-
-
 DEVELOPERS' NOTE
 ----------------
 The idea of this module is to create a `DataTree` class which inherits the tree structure from TreeNode, and also copies
 the entire API of `xarray.Dataset`, but with certain methods decorated to instead map the dataset function over every
 node in the tree. As this API is copied without directly subclassing `xarray.Dataset` we instead create various Mixin
-classes which each define part of `xarray.Dataset`'s extensive API.
+classes (in ops.py) which each define part of `xarray.Dataset`'s extensive API.
 
 Some of these methods must be wrapped to map over all nodes in the subtree. Others are fine to inherit unaltered
 (normally because they (a) only call dataset properties and (b) don't return a dataset that should be nested into a new
@@ -56,6 +38,8 @@ class DatasetPropertiesMixin:
     # TODO a neater way of setting all of these?
     # We wouldn't need this at all if we inherited directly from Dataset...
 
+    # TODO we could also just not define these at all, and require users to call e.g. dt.ds.dims ...
+
     @property
     def dims(self):
         if self.has_data:
@@ -159,252 +143,12 @@ def imag(self):
     chunks.__doc__ = Dataset.chunks.__doc__
 
 
-_MAPPED_DOCSTRING_ADDENDUM = textwrap.fill(
-    "This method was copied from xarray.Dataset, but has been altered to "
-    "call the method on the Datasets stored in every node of the subtree. "
-    "See the `map_over_subtree` function for more details.",
-    width=117,
-)
-
-# TODO equals, broadcast_equals etc.
-# TODO do dask-related private methods need to be exposed?
-_DATASET_DASK_METHODS_TO_MAP = [
-    "load",
-    "compute",
-    "persist",
-    "unify_chunks",
-    "chunk",
-    "map_blocks",
-]
-_DATASET_METHODS_TO_MAP = [
-    "copy",
-    "as_numpy",
-    "__copy__",
-    "__deepcopy__",
-    "set_coords",
-    "reset_coords",
-    "info",
-    "isel",
-    "sel",
-    "head",
-    "tail",
-    "thin",
-    "broadcast_like",
-    "reindex_like",
-    "reindex",
-    "interp",
-    "interp_like",
-    "rename",
-    "rename_dims",
-    "rename_vars",
-    "swap_dims",
-    "expand_dims",
-    "set_index",
-    "reset_index",
-    "reorder_levels",
-    "stack",
-    "unstack",
-    "update",
-    "merge",
-    "drop_vars",
-    "drop_sel",
-    "drop_isel",
-    "drop_dims",
-    "transpose",
-    "dropna",
-    "fillna",
-    "interpolate_na",
-    "ffill",
-    "bfill",
-    "combine_first",
-    "reduce",
-    "map",
-    "assign",
-    "diff",
-    "shift",
-    "roll",
-    "sortby",
-    "quantile",
-    "rank",
-    "differentiate",
-    "integrate",
-    "cumulative_integrate",
-    "filter_by_attrs",
-    "polyfit",
-    "pad",
-    "idxmin",
-    "idxmax",
-    "argmin",
-    "argmax",
-    "query",
-    "curvefit",
-]
-# TODO unsure if these are called by external functions or not?
-_DATASET_OPS_TO_MAP = ["_unary_op", "_binary_op", "_inplace_binary_op"]
-_ALL_DATASET_METHODS_TO_MAP = (
-    _DATASET_DASK_METHODS_TO_MAP + _DATASET_METHODS_TO_MAP + _DATASET_OPS_TO_MAP
-)
-
-_DATA_WITH_COORDS_METHODS_TO_MAP = [
-    "squeeze",
-    "clip",
-    "assign_coords",
-    "where",
-    "close",
-    "isnull",
-    "notnull",
-    "isin",
-    "astype",
-]
-
-_TYPED_DATASET_OPS_TO_MAP = [
-    "__add__",
-    "__sub__",
-    "__mul__",
-    "__pow__",
-    "__truediv__",
-    "__floordiv__",
-    "__mod__",
-    "__and__",
-    "__xor__",
-    "__or__",
-    "__lt__",
-    "__le__",
-    "__gt__",
-    "__ge__",
-    "__eq__",
-    "__ne__",
-    "__radd__",
-    "__rsub__",
-    "__rmul__",
-    "__rpow__",
-    "__rtruediv__",
-    "__rfloordiv__",
-    "__rmod__",
-    "__rand__",
-    "__rxor__",
-    "__ror__",
-    "__iadd__",
-    "__isub__",
-    "__imul__",
-    "__ipow__",
-    "__itruediv__",
-    "__ifloordiv__",
-    "__imod__",
-    "__iand__",
-    "__ixor__",
-    "__ior__",
-    "__neg__",
-    "__pos__",
-    "__abs__",
-    "__invert__",
-    "round",
-    "argsort",
-    "conj",
-    "conjugate",
-]
-# TODO NUM_BINARY_OPS apparently aren't defined on DatasetArithmetic, and don't appear to be injected anywhere...
-_ARITHMETIC_METHODS_TO_MAP = (
-    REDUCE_METHODS
-    + NAN_REDUCE_METHODS
-    + NAN_CUM_METHODS
-    + _TYPED_DATASET_OPS_TO_MAP
-    + ["__array_ufunc__"]
-)
-
-
-def _wrap_then_attach_to_cls(
-    target_cls_dict, source_cls, methods_to_set, wrap_func=None
-):
-    """
-    Attach given methods on a class, and optionally wrap each method first. (i.e. with map_over_subtree)
-
-    Result is like having written this in the classes' definition:
-    ```
-    @wrap_func
-    def method_name(self, *args, **kwargs):
-        return self.method(*args, **kwargs)
-    ```
-
-    Every method attached here needs to have a return value of Dataset or DataArray in order to construct a new tree.
-
-    Parameters
-    ----------
-    target_cls_dict : MappingProxy
-        The __dict__ attribute of the class which we want the methods to be added to. (The __dict__ attribute can also
-        be accessed by calling vars() from within that classes' definition.) This will be updated by this function.
-    source_cls : class
-        Class object from which we want to copy methods (and optionally wrap them). Should be the actual class object
-        (or instance), not just the __dict__.
-    methods_to_set : Iterable[Tuple[str, callable]]
-        The method names and definitions supplied as a list of (method_name_string, method) pairs.
-        This format matches the output of inspect.getmembers().
-    wrap_func : callable, optional
-        Function to decorate each method with. Must have the same return type as the method.
-    """
-    for method_name in methods_to_set:
-        orig_method = getattr(source_cls, method_name)
-        wrapped_method = (
-            wrap_func(orig_method) if wrap_func is not None else orig_method
-        )
-        target_cls_dict[method_name] = wrapped_method
-
-        if wrap_func is map_over_subtree:
-            # Add a paragraph to the method's docstring explaining how it's been mapped
-            orig_method_docstring = orig_method.__doc__
-            if orig_method_docstring is not None:
-                if "\n" in orig_method_docstring:
-                    new_method_docstring = orig_method_docstring.replace(
-                        "\n", _MAPPED_DOCSTRING_ADDENDUM, 1
-                    )
-                else:
-                    new_method_docstring = (
-                        orig_method_docstring + f"\n\n{_MAPPED_DOCSTRING_ADDENDUM}"
-                    )
-                setattr(target_cls_dict[method_name], "__doc__", new_method_docstring)
-
-
-class MappedDatasetMethodsMixin:
-    """
-    Mixin to add Dataset methods like .mean(), but wrapped to map over all nodes in the subtree.
-    """
-
-    __slots__ = ()
-    _wrap_then_attach_to_cls(
-        vars(), Dataset, _ALL_DATASET_METHODS_TO_MAP, wrap_func=map_over_subtree
-    )
-
-
-class MappedDataWithCoords(DataWithCoords):
-    # TODO add mapped versions of groupby, weighted, rolling, rolling_exp, coarsen, resample
-    # TODO re-implement AttrsAccessMixin stuff so that it includes access to child nodes
-    _wrap_then_attach_to_cls(
-        vars(),
-        DataWithCoords,
-        _DATA_WITH_COORDS_METHODS_TO_MAP,
-        wrap_func=map_over_subtree,
-    )
-
-
-class DataTreeArithmetic(DatasetArithmetic):
-    """
-    Mixin to add Dataset methods like __add__ and .mean().
-    """
-
-    _wrap_then_attach_to_cls(
-        vars(),
-        DatasetArithmetic,
-        _ARITHMETIC_METHODS_TO_MAP,
-        wrap_func=map_over_subtree,
-    )
-
-
 class DataTree(
     TreeNode,
     DatasetPropertiesMixin,
     MappedDatasetMethodsMixin,
     MappedDataWithCoords,
-    DataTreeArithmetic,
+    DataTreeArithmeticMixin,
 ):
     """
     A tree-like hierarchical collection of xarray objects.
@@ -908,7 +652,7 @@ def to_netcdf(
 
     def to_zarr(self, store, mode: str = "w", encoding=None, **kwargs):
         """
-        Write datatree contents to a netCDF file.
+        Write datatree contents to a Zarr store.
 
         Parameters
         ---------
            ``{"root/set1": {"my_variable": {"dtype": "int16", "scale_factor": 0.1}, ...}, ...}``.
            See ``xarray.Dataset.to_zarr`` for available options.
        kwargs :
-            Addional keyword arguments to be passed to ``xarray.Dataset.to_zarr``
+            Additional keyword arguments to be passed to ``xarray.Dataset.to_zarr``
        """
        from .io import _datatree_to_zarr
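[A usage sketch of the `to_zarr` signature documented in the hunk above — the store path, variable names and group layout are hypothetical; the per-group encoding key follows the ``"root/set1"`` example from the docstring itself:]

```python
import xarray as xr

from datatree import DataNode

dt = DataNode("root", data=xr.Dataset({"my_variable": ("x", [1, 2, 3])}))
DataNode("set1", data=xr.Dataset({"my_variable": ("x", [4, 5, 6])}), parent=dt)

# Hypothetical call: one Zarr group per tree node, with per-group encoding.
dt.to_zarr(
    "example_datatree.zarr",  # hypothetical store path
    mode="w",
    encoding={"root/set1": {"my_variable": {"dtype": "int16", "scale_factor": 0.1}}},
)
```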
diff --git a/datatree/ops.py b/datatree/ops.py
new file mode 100644
index 00000000..e411c973
--- /dev/null
+++ b/datatree/ops.py
@@ -0,0 +1,268 @@
+import textwrap
+
+from xarray import Dataset
+
+from .mapping import map_over_subtree
+
+"""
+Module which specifies the subset of xarray.Dataset's API which we wish to copy onto DataTree.
+
+Structured to mirror the way xarray defines Dataset's various operations internally, but does not actually import from
+xarray's internals directly, only the public-facing xarray.Dataset class.
+"""
+
+
+_MAPPED_DOCSTRING_ADDENDUM = textwrap.fill(
+    "This method was copied from xarray.Dataset, but has been altered to "
+    "call the method on the Datasets stored in every node of the subtree. "
+    "See the `map_over_subtree` function for more details.",
+    width=117,
+)
+
+# TODO equals, broadcast_equals etc.
+# TODO do dask-related private methods need to be exposed?
+_DATASET_DASK_METHODS_TO_MAP = [
+    "load",
+    "compute",
+    "persist",
+    "unify_chunks",
+    "chunk",
+    "map_blocks",
+]
+_DATASET_METHODS_TO_MAP = [
+    "copy",
+    "as_numpy",
+    "__copy__",
+    "__deepcopy__",
+    "set_coords",
+    "reset_coords",
+    "info",
+    "isel",
+    "sel",
+    "head",
+    "tail",
+    "thin",
+    "broadcast_like",
+    "reindex_like",
+    "reindex",
+    "interp",
+    "interp_like",
+    "rename",
+    "rename_dims",
+    "rename_vars",
+    "swap_dims",
+    "expand_dims",
+    "set_index",
+    "reset_index",
+    "reorder_levels",
+    "stack",
+    "unstack",
+    "update",
+    "merge",
+    "drop_vars",
+    "drop_sel",
+    "drop_isel",
+    "drop_dims",
+    "transpose",
+    "dropna",
+    "fillna",
+    "interpolate_na",
+    "ffill",
+    "bfill",
+    "combine_first",
+    "reduce",
+    "map",
+    "assign",
+    "diff",
+    "shift",
+    "roll",
+    "sortby",
+    "quantile",
+    "rank",
+    "differentiate",
+    "integrate",
+    "cumulative_integrate",
+    "filter_by_attrs",
+    "polyfit",
+    "pad",
+    "idxmin",
+    "idxmax",
+    "argmin",
+    "argmax",
+    "query",
+    "curvefit",
+]
+_ALL_DATASET_METHODS_TO_MAP = _DATASET_DASK_METHODS_TO_MAP + _DATASET_METHODS_TO_MAP
+
+_DATA_WITH_COORDS_METHODS_TO_MAP = [
+    "squeeze",
+    "clip",
+    "assign_coords",
+    "where",
+    "close",
+    "isnull",
+    "notnull",
+    "isin",
+    "astype",
+]
+
+REDUCE_METHODS = ["all", "any"]
+NAN_REDUCE_METHODS = [
+    "max",
+    "min",
+    "mean",
+    "prod",
+    "sum",
+    "std",
+    "var",
+    "median",
+]
+NAN_CUM_METHODS = ["cumsum", "cumprod"]
+_TYPED_DATASET_OPS_TO_MAP = [
+    "__add__",
+    "__sub__",
+    "__mul__",
+    "__pow__",
+    "__truediv__",
+    "__floordiv__",
+    "__mod__",
+    "__and__",
+    "__xor__",
+    "__or__",
+    "__lt__",
+    "__le__",
+    "__gt__",
+    "__ge__",
+    "__eq__",
+    "__ne__",
+    "__radd__",
+    "__rsub__",
+    "__rmul__",
+    "__rpow__",
+    "__rtruediv__",
+    "__rfloordiv__",
+    "__rmod__",
+    "__rand__",
+    "__rxor__",
+    "__ror__",
+    "__iadd__",
+    "__isub__",
+    "__imul__",
+    "__ipow__",
+    "__itruediv__",
+    "__ifloordiv__",
+    "__imod__",
+    "__iand__",
+    "__ixor__",
+    "__ior__",
+    "__neg__",
+    "__pos__",
+    "__abs__",
+    "__invert__",
+    "round",
+    "argsort",
+    "conj",
+    "conjugate",
+]
+# TODO NUM_BINARY_OPS apparently aren't defined on DatasetArithmetic, and don't appear to be injected anywhere...
+_ARITHMETIC_METHODS_TO_MAP = (
+    REDUCE_METHODS
+    + NAN_REDUCE_METHODS
+    + NAN_CUM_METHODS
+    + _TYPED_DATASET_OPS_TO_MAP
+    + ["__array_ufunc__"]
+)
+
+
+def _wrap_then_attach_to_cls(
+    target_cls_dict, source_cls, methods_to_set, wrap_func=None
+):
+    """
+    Attach given methods on a class, and optionally wrap each method first. (i.e. with map_over_subtree)
+
+    Result is like having written this in the classes' definition:
+    ```
+    @wrap_func
+    def method_name(self, *args, **kwargs):
+        return self.method(*args, **kwargs)
+    ```
+
+    Every method attached here needs to have a return value of Dataset or DataArray in order to construct a new tree.
+
+    Parameters
+    ----------
+    target_cls_dict : MappingProxy
+        The __dict__ attribute of the class which we want the methods to be added to. (The __dict__ attribute can also
+        be accessed by calling vars() from within that classes' definition.) This will be updated by this function.
+    source_cls : class
+        Class object from which we want to copy methods (and optionally wrap them). Should be the actual class object
+        (or instance), not just the __dict__.
+    methods_to_set : Iterable[Tuple[str, callable]]
+        The method names and definitions supplied as a list of (method_name_string, method) pairs.
+        This format matches the output of inspect.getmembers().
+    wrap_func : callable, optional
+        Function to decorate each method with. Must have the same return type as the method.
+    """
+    for method_name in methods_to_set:
+        orig_method = getattr(source_cls, method_name)
+        wrapped_method = (
+            wrap_func(orig_method) if wrap_func is not None else orig_method
+        )
+        target_cls_dict[method_name] = wrapped_method
+
+        if wrap_func is map_over_subtree:
+            # Add a paragraph to the method's docstring explaining how it's been mapped
+            orig_method_docstring = orig_method.__doc__
+            if orig_method_docstring is not None:
+                if "\n" in orig_method_docstring:
+                    new_method_docstring = orig_method_docstring.replace(
+                        "\n", _MAPPED_DOCSTRING_ADDENDUM, 1
+                    )
+                else:
+                    new_method_docstring = (
+                        orig_method_docstring + f"\n\n{_MAPPED_DOCSTRING_ADDENDUM}"
+                    )
+                setattr(target_cls_dict[method_name], "__doc__", new_method_docstring)
+
+
+class MappedDatasetMethodsMixin:
+    """
+    Mixin to add methods defined specifically on the Dataset class such as .query(), but wrapped to map over all nodes
+    in the subtree.
+    """
+
+    _wrap_then_attach_to_cls(
+        target_cls_dict=vars(),
+        source_cls=Dataset,
+        methods_to_set=_ALL_DATASET_METHODS_TO_MAP,
+        wrap_func=map_over_subtree,
+    )
+
+
+class MappedDataWithCoords:
+    """
+    Mixin to add coordinate-aware Dataset methods such as .where(), but wrapped to map over all nodes in the subtree.
+    """
+
+    # TODO add mapped versions of groupby, weighted, rolling, rolling_exp, coarsen, resample
+    # TODO re-implement AttrsAccessMixin stuff so that it includes access to child nodes
+    _wrap_then_attach_to_cls(
+        target_cls_dict=vars(),
+        source_cls=Dataset,
+        methods_to_set=_DATA_WITH_COORDS_METHODS_TO_MAP,
+        wrap_func=map_over_subtree,
+    )
+
+
+class DataTreeArithmeticMixin:
+    """
+    Mixin to add Dataset arithmetic operations such as __add__, reduction methods such as .mean(), and enable numpy
+    ufuncs such as np.sin(), but wrapped to map over all nodes in the subtree.
+ """ + + _wrap_then_attach_to_cls( + target_cls_dict=vars(), + source_cls=Dataset, + methods_to_set=_ARITHMETIC_METHODS_TO_MAP, + wrap_func=map_over_subtree, + ) From f83ed27867d05ce2cc33e88a2639ebd0cfd6de7e Mon Sep 17 00:00:00 2001 From: Thomas Nicholas Date: Thu, 2 Sep 2021 12:04:36 -0400 Subject: [PATCH 5/6] linting --- datatree/tests/test_dataset_api.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/datatree/tests/test_dataset_api.py b/datatree/tests/test_dataset_api.py index 9af541c5..2c6a4d52 100644 --- a/datatree/tests/test_dataset_api.py +++ b/datatree/tests/test_dataset_api.py @@ -111,7 +111,9 @@ def test_binary_op_on_dataset(self): other_ds = xr.Dataset({"z": ("z", [0.1, 0.2])}) expected_root = DataNode("root", data=ds1 * other_ds) - expected_descendant = DataNode("subnode", data=ds2 * other_ds, parent=expected_root) + expected_descendant = DataNode( + "subnode", data=ds2 * other_ds, parent=expected_root + ) result = dt * other_ds assert_equal(result.ds, expected_root.ds) From 8d6c7d73c112b82b8a9aafc9bf80cc01fd259994 Mon Sep 17 00:00:00 2001 From: Thomas Nicholas Date: Thu, 2 Sep 2021 12:12:07 -0400 Subject: [PATCH 6/6] minimise imports of xarray internals --- datatree/datatree.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/datatree/datatree.py b/datatree/datatree.py index 0bedec3e..76fc1bf9 100644 --- a/datatree/datatree.py +++ b/datatree/datatree.py @@ -4,10 +4,8 @@ from typing import Any, Callable, Dict, Hashable, Iterable, List, Mapping, Union import anytree +from xarray import DataArray, Dataset, merge from xarray.core import dtypes, utils -from xarray.core.combine import merge -from xarray.core.dataarray import DataArray -from xarray.core.dataset import Dataset from xarray.core.variable import Variable from .mapping import map_over_subtree