@@ -392,9 +392,9 @@ def _from_storage(self, stripped_indexer, drop_axes, out_shape, out_dtype,
             # cf-python needs the sample size for all reductions;
             # see the 'mtol' parameter of cf.Field.collapse.)
             #
-            # Note that in this case the reduced data must always
-            # have the same number of dimensions as the original
-            # array, i.e. 'drop_axes' is always considered False,
+            # Note that in this case all components must always
+            # have the same number of dimensions as the original
+            # array, i.e. 'drop_axes' is always considered False,
             # regardless of its setting. (Rationale: dask
             # reductions require the per-dask-chunk partial
             # reductions to retain these dimensions so that
@@ -404,20 +404,21 @@ def _from_storage(self, stripped_indexer, drop_axes, out_shape, out_dtype,
                 n = np.reshape(n, shape1)
                 out = out.reshape(shape1)
 
+            n = np.sum(counts).reshape(shape1)
             if self._method == "mean":
                 # For the average, the returned component is
                 # "sum", not "mean"
-                out = {"sum": out, "n": sum(counts)}
+                out = {"sum": out, "n": n}
             else:
-                out = {self._method: out, "n": sum(counts)}
+                out = {self._method: out, "n": n}
         else:
             # Return the reduced data as a numpy array. For most
             # methods the data is already in this form.
             if self._method == "mean":
                 # For the average, it is actually the sum that has
                 # been created, so we need to divide by the sample
                 # size.
-                out = out / sum(counts)
+                out = out / np.sum(counts).reshape(shape1)
 
 
         return out
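
For readers following the rationale in the comments above, here is a minimal NumPy-only sketch (illustrative, not part of this commit; `partial_reduce` and `combine` are hypothetical names standing in for the dask/active-storage machinery) of why every returned component keeps the original number of dimensions and why, for the mean, the accumulated sum is divided by the sample size. As with the change to `np.sum(counts).reshape(shape1)`, the sample size is given the same shape as the partial sum so that per-chunk results can be concatenated and aggregated together.

```python
# Illustrative only -- not part of this commit. 'partial_reduce' and
# 'combine' are hypothetical helpers; they only demonstrate why each
# component keeps the full dimensionality and why the mean is sum / n.
import numpy as np


def partial_reduce(chunk, axis):
    # Per-chunk partial reduction: both components retain the same
    # number of dimensions as the original array (keepdims=True),
    # mirroring "'drop_axes' is always considered False".
    return {
        "sum": np.sum(chunk, axis=axis, keepdims=True),
        "n": np.sum(np.ones_like(chunk), axis=axis, keepdims=True),
    }


def combine(partials, axis):
    # Because every component kept its dimensions, the per-chunk
    # results can be concatenated along the reduced axis and then
    # aggregated.
    total = np.concatenate([p["sum"] for p in partials], axis=axis)
    n = np.concatenate([p["n"] for p in partials], axis=axis)
    total = np.sum(total, axis=axis, keepdims=True)
    n = np.sum(n, axis=axis, keepdims=True)
    # For the average it is the sum that has been accumulated, so
    # divide by the sample size (cf. the "mean" branch above).
    return total / n


data = np.arange(12.0).reshape(3, 4)
chunks = np.array_split(data, 2, axis=1)
partials = [partial_reduce(c, axis=1) for c in chunks]
print(combine(partials, axis=1))         # per-row means: 1.5, 5.5, 9.5
print(data.mean(axis=1, keepdims=True))  # same result
```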