Commit 58d5c30

Author: Klaus Zimmermann (committed)

Adds lazy version of aggregated_by routine to Cube.

Closes #3280

Signed-off-by: Klaus Zimmermann <[email protected]>

1 parent 129ae41 · commit 58d5c30

File tree

2 files changed: +39 -27 lines

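The core of the change to lib/iris/cube.py is a dask pattern: aggregate each group-by slice lazily, then stack the per-group results along the grouped axis so the whole aggregate-by result remains one lazy array. A minimal standalone sketch of that pattern in plain dask (illustrative only, not iris code; the array, chunking and group slices are made up):

    import dask.array as da
    import numpy as np

    # Stand-in for a cube's lazy payload: 6 time steps of a 3-point series.
    data = da.from_array(np.arange(18.).reshape(6, 3), chunks=(3, 3))

    # Two groups along axis 0, e.g. two seasons of three time steps each.
    group_slices = [slice(0, 3), slice(3, 6)]

    # Aggregate each group lazily, then stack the lazy results along the
    # grouped axis; nothing is computed until .compute() is called.
    stacked = da.stack([data[s].mean(axis=0) for s in group_slices], axis=0)
    print(stacked.compute())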

lib/iris/cube.py

Lines changed: 38 additions & 27 deletions

@@ -3316,10 +3316,6 @@ def aggregated_by(self, coords, aggregator, **kwargs):
         Returns:
             :class:`iris.cube.Cube`.
 
-        .. note::
-
-            This operation does not yet have support for lazy evaluation.
-
         For example:
 
             >>> import iris
@@ -3413,29 +3409,44 @@ def aggregated_by(self, coords, aggregator, **kwargs):
         data_shape[dimension_to_groupby] = len(groupby)
 
         # Aggregate the group-by data.
-        cube_slice = [slice(None, None)] * len(data_shape)
-
-        for i, groupby_slice in enumerate(groupby.group()):
-            # Slice the cube with the group-by slice to create a group-by
-            # sub-cube.
-            cube_slice[dimension_to_groupby] = groupby_slice
-            groupby_sub_cube = self[tuple(cube_slice)]
-            # Perform the aggregation over the group-by sub-cube and
-            # repatriate the aggregated data into the aggregate-by cube data.
-            cube_slice[dimension_to_groupby] = i
-            result = aggregator.aggregate(groupby_sub_cube.data,
-                                          axis=dimension_to_groupby,
-                                          **kwargs)
-
-            # Determine aggregation result data type for the aggregate-by cube
-            # data on first pass.
-            if i == 0:
-                if ma.isMaskedArray(self.data):
-                    aggregateby_data = ma.zeros(data_shape, dtype=result.dtype)
-                else:
-                    aggregateby_data = np.zeros(data_shape, dtype=result.dtype)
-
-            aggregateby_data[tuple(cube_slice)] = result
+        if (aggregator.lazy_func is not None and self.has_lazy_data()):
+            cube_slice_1 = [slice(None, None)] * dimension_to_groupby
+            cube_slice_2 = [slice(None, None)] * (len(data_shape) -
+                                                  dimension_to_groupby -
+                                                  1)
+            result = [
+                aggregator.lazy_aggregate(
+                    self[tuple(cube_slice_1 +
+                               [groupby_slice] +
+                               cube_slice_2)].lazy_data(),
+                    axis=dimension_to_groupby,
+                    **kwargs) for groupby_slice in groupby.group()]
+            aggregateby_data = da.stack(result, axis=dimension_to_groupby)
+        else:
+            cube_slice = [slice(None, None)] * len(data_shape)
+            for i, groupby_slice in enumerate(groupby.group()):
+                # Slice the cube with the group-by slice to create a group-by
+                # sub-cube.
+                cube_slice[dimension_to_groupby] = groupby_slice
+                groupby_sub_cube = self[tuple(cube_slice)]
+                # Perform the aggregation over the group-by sub-cube and
+                # repatriate the aggregated data into the aggregate-by
+                # cube data.
+                cube_slice[dimension_to_groupby] = i
+                result = aggregator.aggregate(groupby_sub_cube.data,
+                                              axis=dimension_to_groupby,
+                                              **kwargs)
+
+                # Determine aggregation result data type for the aggregate-by
+                # cube data on first pass.
+                if i == 0:
+                    if ma.isMaskedArray(self.data):
+                        aggregateby_data = ma.zeros(data_shape,
+                                                    dtype=result.dtype)
+                    else:
+                        aggregateby_data = np.zeros(data_shape,
+                                                    dtype=result.dtype)
+                aggregateby_data[tuple(cube_slice)] = result
 
         # Add the aggregation meta data to the aggregate-by cube.
         aggregator.update_metadata(aggregateby_cube,
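Seen from the user side, the new branch means that when the aggregator defines a lazy_func and the cube still holds lazy data, aggregated_by builds a single lazy dask array from the per-group lazy_aggregate results instead of realising the data group by group. A minimal usage sketch, not part of this commit, assuming the optional iris-sample-data package is installed and using iris.analysis.MEAN, which provides a lazy_func:

    import iris
    import iris.analysis
    import iris.coord_categorisation

    # Loading from NetCDF gives the cube lazy (dask-backed) data.
    cube = iris.load_cube(iris.sample_data_path('A1B_north_america.nc'))
    iris.coord_categorisation.add_year(cube, 'time', name='year')

    # MEAN defines lazy_func, so with this change the aggregation stays lazy.
    annual_mean = cube.aggregated_by('year', iris.analysis.MEAN)
    print(annual_mean.has_lazy_data())  # expected to print True with this change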

lib/iris/tests/unit/cube/test_Cube.py

Lines changed: 1 addition & 0 deletions

@@ -476,6 +476,7 @@ def setUp(self):
         self.mock_agg.aggregate = mock.Mock(
             return_value=mock.Mock(dtype='object'))
         self.mock_agg.aggregate_shape = mock.Mock(return_value=())
+        self.mock_agg.lazy_func = None
         self.mock_agg.post_process = mock.Mock(side_effect=lambda x, y, z: x)
 
     def test_2d_coord_simple_agg(self):
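Setting lazy_func = None on the mocked aggregator keeps these existing tests on the eager branch, since the new code only takes the lazy path when aggregator.lazy_func is not None and the cube has lazy data. A hypothetical complementary check for the lazy path might look like the sketch below; it is not part of this commit, and the test name, group values and assertions are illustrative only:

    import dask.array as da
    import numpy as np

    import iris.analysis
    from iris.coords import AuxCoord
    from iris.cube import Cube


    def test_lazy_aggregated_by_keeps_data_lazy():
        # A 4x3 cube backed by a dask array, grouped along dimension 0.
        lazy = da.from_array(np.arange(12.).reshape(4, 3), chunks=(2, 3))
        cube = Cube(lazy)
        cube.add_aux_coord(AuxCoord([0, 0, 1, 1], long_name='group'), 0)

        result = cube.aggregated_by('group', iris.analysis.MEAN)

        # The aggregation should stay lazy and only compute on demand.
        assert result.has_lazy_data()
        np.testing.assert_array_almost_equal(
            result.data,
            np.array([[1.5, 2.5, 3.5], [7.5, 8.5, 9.5]]))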
