From eceec5f7bbbe93b2f24f49af6361a135ae72e2e4 Mon Sep 17 00:00:00 2001 From: Articoking <90768774+Articoking@users.noreply.github.com> Date: Wed, 9 Aug 2023 15:50:54 +0200 Subject: [PATCH] Count documentation (#8057) * Changed aggregation example to include 0 value * Fixed broken links in count docs (#8055) * Added entry to whats-new.rst * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --------- Co-authored-by: Guillermo Cossio Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- doc/whats-new.rst | 2 + xarray/core/_aggregations.py | 512 +++++++++++++-------------- xarray/util/generate_aggregations.py | 15 +- 3 files changed, 271 insertions(+), 258 deletions(-) diff --git a/doc/whats-new.rst b/doc/whats-new.rst index 5bc4f256fdb..564c68bfc35 100644 --- a/doc/whats-new.rst +++ b/doc/whats-new.rst @@ -95,6 +95,8 @@ Documentation By `BenoƮt Bovy `_. - Added a page clarifying the role of Xarray core team members. (:pull:`7999`) By `Tom Nicholas `_. +- Fixed broken links in "See also" section of :py:meth:`Dataset.count` (:issue:`8055`, :pull:`8057`) + By `Articoking `_. Internal Changes ~~~~~~~~~~~~~~~~ diff --git a/xarray/core/_aggregations.py b/xarray/core/_aggregations.py index 4f6dce1a04a..d5070f97c6a 100644 --- a/xarray/core/_aggregations.py +++ b/xarray/core/_aggregations.py @@ -65,8 +65,8 @@ def count( See Also -------- - numpy.count - dask.array.count + pandas.DataFrame.count + dask.dataframe.DataFrame.count DataArray.count :ref:`agg` User guide on reduction or aggregation operations. @@ -74,7 +74,7 @@ def count( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -89,7 +89,7 @@ def count( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> ds.count() @@ -296,7 +296,7 @@ def max( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -311,7 +311,7 @@ def max( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> ds.max() @@ -383,7 +383,7 @@ def min( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -398,13 +398,13 @@ def min( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> ds.min() Dimensions: () Data variables: - da float64 1.0 + da float64 0.0 Use ``skipna`` to control whether NaNs are ignored. @@ -474,7 +474,7 @@ def mean( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -489,13 +489,13 @@ def mean( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> ds.mean() Dimensions: () Data variables: - da float64 1.8 + da float64 1.6 Use ``skipna`` to control whether NaNs are ignored. @@ -572,7 +572,7 @@ def prod( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... 
time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -587,13 +587,13 @@ def prod( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> ds.prod() Dimensions: () Data variables: - da float64 12.0 + da float64 0.0 Use ``skipna`` to control whether NaNs are ignored. @@ -609,7 +609,7 @@ def prod( Dimensions: () Data variables: - da float64 12.0 + da float64 0.0 """ return self.reduce( duck_array_ops.prod, @@ -679,7 +679,7 @@ def sum( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -694,13 +694,13 @@ def sum( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> ds.sum() Dimensions: () Data variables: - da float64 9.0 + da float64 8.0 Use ``skipna`` to control whether NaNs are ignored. @@ -716,7 +716,7 @@ def sum( Dimensions: () Data variables: - da float64 9.0 + da float64 8.0 """ return self.reduce( duck_array_ops.sum, @@ -783,7 +783,7 @@ def std( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -798,13 +798,13 @@ def std( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> ds.std() Dimensions: () Data variables: - da float64 0.7483 + da float64 1.02 Use ``skipna`` to control whether NaNs are ignored. @@ -820,7 +820,7 @@ def std( Dimensions: () Data variables: - da float64 0.8367 + da float64 1.14 """ return self.reduce( duck_array_ops.std, @@ -887,7 +887,7 @@ def var( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -902,13 +902,13 @@ def var( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> ds.var() Dimensions: () Data variables: - da float64 0.56 + da float64 1.04 Use ``skipna`` to control whether NaNs are ignored. @@ -924,7 +924,7 @@ def var( Dimensions: () Data variables: - da float64 0.7 + da float64 1.3 """ return self.reduce( duck_array_ops.var, @@ -987,7 +987,7 @@ def median( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -1002,7 +1002,7 @@ def median( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> ds.median() @@ -1078,7 +1078,7 @@ def cumsum( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -1093,14 +1093,14 @@ def cumsum( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> ds.cumsum() Dimensions: (time: 6) Dimensions without coordinates: time Data variables: - da (time) float64 1.0 3.0 6.0 7.0 9.0 9.0 + da (time) float64 1.0 3.0 6.0 6.0 8.0 8.0 Use ``skipna`` to control whether NaNs are ignored. 
@@ -1109,7 +1109,7 @@ def cumsum( Dimensions: (time: 6) Dimensions without coordinates: time Data variables: - da (time) float64 1.0 3.0 6.0 7.0 9.0 nan + da (time) float64 1.0 3.0 6.0 6.0 8.0 nan """ return self.reduce( duck_array_ops.cumsum, @@ -1171,7 +1171,7 @@ def cumprod( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -1186,14 +1186,14 @@ def cumprod( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> ds.cumprod() Dimensions: (time: 6) Dimensions without coordinates: time Data variables: - da (time) float64 1.0 2.0 6.0 6.0 12.0 12.0 + da (time) float64 1.0 2.0 6.0 0.0 0.0 0.0 Use ``skipna`` to control whether NaNs are ignored. @@ -1202,7 +1202,7 @@ def cumprod( Dimensions: (time: 6) Dimensions without coordinates: time Data variables: - da (time) float64 1.0 2.0 6.0 6.0 12.0 nan + da (time) float64 1.0 2.0 6.0 0.0 0.0 nan """ return self.reduce( duck_array_ops.cumprod, @@ -1261,8 +1261,8 @@ def count( See Also -------- - numpy.count - dask.array.count + pandas.DataFrame.count + dask.dataframe.DataFrame.count Dataset.count :ref:`agg` User guide on reduction or aggregation operations. @@ -1270,7 +1270,7 @@ def count( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -1279,7 +1279,7 @@ def count( ... ) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -1483,7 +1483,7 @@ def max( ... ) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -1562,14 +1562,14 @@ def min( ... ) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.min() - array(1.) + array(0.) Use ``skipna`` to control whether NaNs are ignored. @@ -1636,7 +1636,7 @@ def mean( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -1645,14 +1645,14 @@ def mean( ... ) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.mean() - array(1.8) + array(1.6) Use ``skipna`` to control whether NaNs are ignored. @@ -1726,7 +1726,7 @@ def prod( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... 
time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -1735,14 +1735,14 @@ def prod( ... ) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.prod() - array(12.) + array(0.) Use ``skipna`` to control whether NaNs are ignored. @@ -1754,7 +1754,7 @@ def prod( >>> da.prod(skipna=True, min_count=2) - array(12.) + array(0.) """ return self.reduce( duck_array_ops.prod, @@ -1823,7 +1823,7 @@ def sum( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -1832,14 +1832,14 @@ def sum( ... ) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.sum() - array(9.) + array(8.) Use ``skipna`` to control whether NaNs are ignored. @@ -1851,7 +1851,7 @@ def sum( >>> da.sum(skipna=True, min_count=2) - array(9.) + array(8.) """ return self.reduce( duck_array_ops.sum, @@ -1917,7 +1917,7 @@ def std( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -1926,14 +1926,14 @@ def std( ... ) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.std() - array(0.74833148) + array(1.0198039) Use ``skipna`` to control whether NaNs are ignored. @@ -1945,7 +1945,7 @@ def std( >>> da.std(skipna=True, ddof=1) - array(0.83666003) + array(1.14017543) """ return self.reduce( duck_array_ops.std, @@ -2011,7 +2011,7 @@ def var( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -2020,14 +2020,14 @@ def var( ... ) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.var() - array(0.56) + array(1.04) Use ``skipna`` to control whether NaNs are ignored. @@ -2039,7 +2039,7 @@ def var( >>> da.var(skipna=True, ddof=1) - array(0.7) + array(1.3) """ return self.reduce( duck_array_ops.var, @@ -2101,7 +2101,7 @@ def median( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -2110,7 +2110,7 @@ def median( ... ) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -2193,14 +2193,14 @@ def cumsum( ... ) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 
2001-06-30 labels (time) >> da.cumsum() - array([1., 3., 6., 7., 9., 9.]) + array([1., 3., 6., 6., 8., 8.]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.cumsum(skipna=False) - array([ 1., 3., 6., 7., 9., nan]) + array([ 1., 3., 6., 6., 8., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -2282,14 +2282,14 @@ def cumprod( ... ) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.cumprod() - array([ 1., 2., 6., 6., 12., 12.]) + array([1., 2., 6., 0., 0., 0.]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.cumprod(skipna=False) - array([ 1., 2., 6., 6., 12., nan]) + array([ 1., 2., 6., 0., 0., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -2400,7 +2400,7 @@ def count( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> ds.groupby("labels").count() @@ -2685,7 +2685,7 @@ def max( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -2700,7 +2700,7 @@ def max( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> ds.groupby("labels").max() @@ -2801,7 +2801,7 @@ def min( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -2816,7 +2816,7 @@ def min( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> ds.groupby("labels").min() @@ -2824,7 +2824,7 @@ def min( Coordinates: * labels (labels) object 'a' 'b' 'c' Data variables: - da (labels) float64 1.0 2.0 1.0 + da (labels) float64 1.0 2.0 0.0 Use ``skipna`` to control whether NaNs are ignored. @@ -2834,7 +2834,7 @@ def min( Coordinates: * labels (labels) object 'a' 'b' 'c' Data variables: - da (labels) float64 nan 2.0 1.0 + da (labels) float64 nan 2.0 0.0 """ if ( flox_available @@ -2919,7 +2919,7 @@ def mean( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -2934,7 +2934,7 @@ def mean( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> ds.groupby("labels").mean() @@ -2942,7 +2942,7 @@ def mean( Coordinates: * labels (labels) object 'a' 'b' 'c' Data variables: - da (labels) float64 1.0 2.0 2.0 + da (labels) float64 1.0 2.0 1.5 Use ``skipna`` to control whether NaNs are ignored. 
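The grouped reductions shift in the same way. A minimal sketch with the same example data (groups are a = {1, nan}, b = {2, 2}, c = {3, 0}); the expected values in the comments again come from the doctest outputs in this patch, and the ``labels`` ordering is inferred from those outputs.

import numpy as np
import pandas as pd
import xarray as xr

da = xr.DataArray(
    np.array([1, 2, 3, 0, 2, np.nan]),
    dims="time",
    coords=dict(
        time=("time", pd.date_range("2001-01-01", freq="M", periods=6)),
        labels=("time", np.array(["a", "b", "c", "c", "b", "a"])),
    ),
)

# Groups: a = {1, nan}, b = {2, 2}, c = {3, 0}
da.groupby("labels").min()               # [1.0, 2.0, 0.0]
da.groupby("labels").mean()              # [1.0, 2.0, 1.5]
da.groupby("labels").mean(skipna=False)  # [nan, 2.0, 1.5]
da.groupby("labels").sum()               # [1.0, 4.0, 3.0]
da.groupby("labels").prod()              # [1.0, 4.0, 0.0]
da.groupby("labels").var(skipna=True, ddof=1)  # [nan, 0.0, 4.5]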
@@ -2952,7 +2952,7 @@ def mean( Coordinates: * labels (labels) object 'a' 'b' 'c' Data variables: - da (labels) float64 nan 2.0 2.0 + da (labels) float64 nan 2.0 1.5 """ if ( flox_available @@ -3044,7 +3044,7 @@ def prod( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -3059,7 +3059,7 @@ def prod( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> ds.groupby("labels").prod() @@ -3067,7 +3067,7 @@ def prod( Coordinates: * labels (labels) object 'a' 'b' 'c' Data variables: - da (labels) float64 1.0 4.0 3.0 + da (labels) float64 1.0 4.0 0.0 Use ``skipna`` to control whether NaNs are ignored. @@ -3077,7 +3077,7 @@ def prod( Coordinates: * labels (labels) object 'a' 'b' 'c' Data variables: - da (labels) float64 nan 4.0 3.0 + da (labels) float64 nan 4.0 0.0 Specify ``min_count`` for finer control over when NaNs are ignored. @@ -3087,7 +3087,7 @@ def prod( Coordinates: * labels (labels) object 'a' 'b' 'c' Data variables: - da (labels) float64 nan 4.0 3.0 + da (labels) float64 nan 4.0 0.0 """ if ( flox_available @@ -3181,7 +3181,7 @@ def sum( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -3196,7 +3196,7 @@ def sum( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> ds.groupby("labels").sum() @@ -3204,7 +3204,7 @@ def sum( Coordinates: * labels (labels) object 'a' 'b' 'c' Data variables: - da (labels) float64 1.0 4.0 4.0 + da (labels) float64 1.0 4.0 3.0 Use ``skipna`` to control whether NaNs are ignored. @@ -3214,7 +3214,7 @@ def sum( Coordinates: * labels (labels) object 'a' 'b' 'c' Data variables: - da (labels) float64 nan 4.0 4.0 + da (labels) float64 nan 4.0 3.0 Specify ``min_count`` for finer control over when NaNs are ignored. @@ -3224,7 +3224,7 @@ def sum( Coordinates: * labels (labels) object 'a' 'b' 'c' Data variables: - da (labels) float64 nan 4.0 4.0 + da (labels) float64 nan 4.0 3.0 """ if ( flox_available @@ -3315,7 +3315,7 @@ def std( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -3330,7 +3330,7 @@ def std( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> ds.groupby("labels").std() @@ -3338,7 +3338,7 @@ def std( Coordinates: * labels (labels) object 'a' 'b' 'c' Data variables: - da (labels) float64 0.0 0.0 1.0 + da (labels) float64 0.0 0.0 1.5 Use ``skipna`` to control whether NaNs are ignored. @@ -3348,7 +3348,7 @@ def std( Coordinates: * labels (labels) object 'a' 'b' 'c' Data variables: - da (labels) float64 nan 0.0 1.0 + da (labels) float64 nan 0.0 1.5 Specify ``ddof=1`` for an unbiased estimate. @@ -3358,7 +3358,7 @@ def std( Coordinates: * labels (labels) object 'a' 'b' 'c' Data variables: - da (labels) float64 nan 0.0 1.414 + da (labels) float64 nan 0.0 2.121 """ if ( flox_available @@ -3449,7 +3449,7 @@ def var( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... 
time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -3464,7 +3464,7 @@ def var( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> ds.groupby("labels").var() @@ -3472,7 +3472,7 @@ def var( Coordinates: * labels (labels) object 'a' 'b' 'c' Data variables: - da (labels) float64 0.0 0.0 1.0 + da (labels) float64 0.0 0.0 2.25 Use ``skipna`` to control whether NaNs are ignored. @@ -3482,7 +3482,7 @@ def var( Coordinates: * labels (labels) object 'a' 'b' 'c' Data variables: - da (labels) float64 nan 0.0 1.0 + da (labels) float64 nan 0.0 2.25 Specify ``ddof=1`` for an unbiased estimate. @@ -3492,7 +3492,7 @@ def var( Coordinates: * labels (labels) object 'a' 'b' 'c' Data variables: - da (labels) float64 nan 0.0 2.0 + da (labels) float64 nan 0.0 4.5 """ if ( flox_available @@ -3579,7 +3579,7 @@ def median( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -3594,7 +3594,7 @@ def median( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> ds.groupby("labels").median() @@ -3602,7 +3602,7 @@ def median( Coordinates: * labels (labels) object 'a' 'b' 'c' Data variables: - da (labels) float64 1.0 2.0 2.0 + da (labels) float64 1.0 2.0 1.5 Use ``skipna`` to control whether NaNs are ignored. @@ -3612,7 +3612,7 @@ def median( Coordinates: * labels (labels) object 'a' 'b' 'c' Data variables: - da (labels) float64 nan 2.0 2.0 + da (labels) float64 nan 2.0 1.5 """ return self.reduce( duck_array_ops.median, @@ -3682,7 +3682,7 @@ def cumsum( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -3697,14 +3697,14 @@ def cumsum( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> ds.groupby("labels").cumsum() Dimensions: (time: 6) Dimensions without coordinates: time Data variables: - da (time) float64 1.0 2.0 3.0 4.0 4.0 1.0 + da (time) float64 1.0 2.0 3.0 3.0 4.0 1.0 Use ``skipna`` to control whether NaNs are ignored. @@ -3713,7 +3713,7 @@ def cumsum( Dimensions: (time: 6) Dimensions without coordinates: time Data variables: - da (time) float64 1.0 2.0 3.0 4.0 4.0 nan + da (time) float64 1.0 2.0 3.0 3.0 4.0 nan """ return self.reduce( duck_array_ops.cumsum, @@ -3783,7 +3783,7 @@ def cumprod( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -3798,14 +3798,14 @@ def cumprod( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> ds.groupby("labels").cumprod() Dimensions: (time: 6) Dimensions without coordinates: time Data variables: - da (time) float64 1.0 2.0 3.0 3.0 4.0 1.0 + da (time) float64 1.0 2.0 3.0 0.0 4.0 1.0 Use ``skipna`` to control whether NaNs are ignored. 
@@ -3814,7 +3814,7 @@ def cumprod( Dimensions: (time: 6) Dimensions without coordinates: time Data variables: - da (time) float64 1.0 2.0 3.0 3.0 4.0 nan + da (time) float64 1.0 2.0 3.0 0.0 4.0 nan """ return self.reduce( duck_array_ops.cumprod, @@ -3881,8 +3881,8 @@ def count( See Also -------- - numpy.count - dask.array.count + pandas.DataFrame.count + dask.dataframe.DataFrame.count Dataset.count :ref:`resampling` User guide on resampling operations. @@ -3899,7 +3899,7 @@ def count( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -3914,7 +3914,7 @@ def count( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> ds.resample(time="3M").count() @@ -4199,7 +4199,7 @@ def max( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -4214,7 +4214,7 @@ def max( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> ds.resample(time="3M").max() @@ -4315,7 +4315,7 @@ def min( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -4330,7 +4330,7 @@ def min( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> ds.resample(time="3M").min() @@ -4338,7 +4338,7 @@ def min( Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31 Data variables: - da (time) float64 1.0 1.0 2.0 + da (time) float64 1.0 0.0 2.0 Use ``skipna`` to control whether NaNs are ignored. @@ -4348,7 +4348,7 @@ def min( Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31 Data variables: - da (time) float64 1.0 1.0 nan + da (time) float64 1.0 0.0 nan """ if ( flox_available @@ -4433,7 +4433,7 @@ def mean( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -4448,7 +4448,7 @@ def mean( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> ds.resample(time="3M").mean() @@ -4456,7 +4456,7 @@ def mean( Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31 Data variables: - da (time) float64 1.0 2.0 2.0 + da (time) float64 1.0 1.667 2.0 Use ``skipna`` to control whether NaNs are ignored. @@ -4466,7 +4466,7 @@ def mean( Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31 Data variables: - da (time) float64 1.0 2.0 nan + da (time) float64 1.0 1.667 nan """ if ( flox_available @@ -4558,7 +4558,7 @@ def prod( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -4573,7 +4573,7 @@ def prod( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 
2001-06-30 labels (time) >> ds.resample(time="3M").prod() @@ -4581,7 +4581,7 @@ def prod( Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31 Data variables: - da (time) float64 1.0 6.0 2.0 + da (time) float64 1.0 0.0 2.0 Use ``skipna`` to control whether NaNs are ignored. @@ -4591,7 +4591,7 @@ def prod( Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31 Data variables: - da (time) float64 1.0 6.0 nan + da (time) float64 1.0 0.0 nan Specify ``min_count`` for finer control over when NaNs are ignored. @@ -4601,7 +4601,7 @@ def prod( Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31 Data variables: - da (time) float64 nan 6.0 nan + da (time) float64 nan 0.0 nan """ if ( flox_available @@ -4695,7 +4695,7 @@ def sum( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -4710,7 +4710,7 @@ def sum( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> ds.resample(time="3M").sum() @@ -4718,7 +4718,7 @@ def sum( Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31 Data variables: - da (time) float64 1.0 6.0 2.0 + da (time) float64 1.0 5.0 2.0 Use ``skipna`` to control whether NaNs are ignored. @@ -4728,7 +4728,7 @@ def sum( Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31 Data variables: - da (time) float64 1.0 6.0 nan + da (time) float64 1.0 5.0 nan Specify ``min_count`` for finer control over when NaNs are ignored. @@ -4738,7 +4738,7 @@ def sum( Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31 Data variables: - da (time) float64 nan 6.0 nan + da (time) float64 nan 5.0 nan """ if ( flox_available @@ -4829,7 +4829,7 @@ def std( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -4844,7 +4844,7 @@ def std( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> ds.resample(time="3M").std() @@ -4852,7 +4852,7 @@ def std( Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31 Data variables: - da (time) float64 0.0 0.8165 0.0 + da (time) float64 0.0 1.247 0.0 Use ``skipna`` to control whether NaNs are ignored. @@ -4862,7 +4862,7 @@ def std( Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31 Data variables: - da (time) float64 0.0 0.8165 nan + da (time) float64 0.0 1.247 nan Specify ``ddof=1`` for an unbiased estimate. @@ -4872,7 +4872,7 @@ def std( Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31 Data variables: - da (time) float64 nan 1.0 nan + da (time) float64 nan 1.528 nan """ if ( flox_available @@ -4963,7 +4963,7 @@ def var( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -4978,7 +4978,7 @@ def var( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 
2001-06-30 labels (time) >> ds.resample(time="3M").var() @@ -4986,7 +4986,7 @@ def var( Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31 Data variables: - da (time) float64 0.0 0.6667 0.0 + da (time) float64 0.0 1.556 0.0 Use ``skipna`` to control whether NaNs are ignored. @@ -4996,7 +4996,7 @@ def var( Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31 Data variables: - da (time) float64 0.0 0.6667 nan + da (time) float64 0.0 1.556 nan Specify ``ddof=1`` for an unbiased estimate. @@ -5006,7 +5006,7 @@ def var( Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31 Data variables: - da (time) float64 nan 1.0 nan + da (time) float64 nan 2.333 nan """ if ( flox_available @@ -5093,7 +5093,7 @@ def median( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -5108,7 +5108,7 @@ def median( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> ds.resample(time="3M").median() @@ -5196,7 +5196,7 @@ def cumsum( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -5211,14 +5211,14 @@ def cumsum( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> ds.resample(time="3M").cumsum() Dimensions: (time: 6) Dimensions without coordinates: time Data variables: - da (time) float64 1.0 2.0 5.0 6.0 2.0 2.0 + da (time) float64 1.0 2.0 5.0 5.0 2.0 2.0 Use ``skipna`` to control whether NaNs are ignored. @@ -5227,7 +5227,7 @@ def cumsum( Dimensions: (time: 6) Dimensions without coordinates: time Data variables: - da (time) float64 1.0 2.0 5.0 6.0 2.0 nan + da (time) float64 1.0 2.0 5.0 5.0 2.0 nan """ return self.reduce( duck_array_ops.cumsum, @@ -5297,7 +5297,7 @@ def cumprod( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -5312,14 +5312,14 @@ def cumprod( * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> ds.resample(time="3M").cumprod() Dimensions: (time: 6) Dimensions without coordinates: time Data variables: - da (time) float64 1.0 2.0 6.0 6.0 2.0 2.0 + da (time) float64 1.0 2.0 6.0 0.0 2.0 2.0 Use ``skipna`` to control whether NaNs are ignored. @@ -5328,7 +5328,7 @@ def cumprod( Dimensions: (time: 6) Dimensions without coordinates: time Data variables: - da (time) float64 1.0 2.0 6.0 6.0 2.0 nan + da (time) float64 1.0 2.0 6.0 0.0 2.0 nan """ return self.reduce( duck_array_ops.cumprod, @@ -5395,8 +5395,8 @@ def count( See Also -------- - numpy.count - dask.array.count + pandas.DataFrame.count + dask.dataframe.DataFrame.count DataArray.count :ref:`groupby` User guide on groupby operations. @@ -5413,7 +5413,7 @@ def count( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -5422,7 +5422,7 @@ def count( ... ) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 
2001-06-30 labels (time) >> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -5701,7 +5701,7 @@ def max( ... ) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -5808,14 +5808,14 @@ def min( ... ) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.groupby("labels").min() - array([1., 2., 1.]) + array([1., 2., 0.]) Coordinates: * labels (labels) object 'a' 'b' 'c' @@ -5823,7 +5823,7 @@ def min( >>> da.groupby("labels").min(skipna=False) - array([nan, 2., 1.]) + array([nan, 2., 0.]) Coordinates: * labels (labels) object 'a' 'b' 'c' """ @@ -5908,7 +5908,7 @@ def mean( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -5917,14 +5917,14 @@ def mean( ... ) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.groupby("labels").mean() - array([1., 2., 2.]) + array([1. , 2. , 1.5]) Coordinates: * labels (labels) object 'a' 'b' 'c' @@ -5932,7 +5932,7 @@ def mean( >>> da.groupby("labels").mean(skipna=False) - array([nan, 2., 2.]) + array([nan, 2. , 1.5]) Coordinates: * labels (labels) object 'a' 'b' 'c' """ @@ -6024,7 +6024,7 @@ def prod( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -6033,14 +6033,14 @@ def prod( ... ) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.groupby("labels").prod() - array([1., 4., 3.]) + array([1., 4., 0.]) Coordinates: * labels (labels) object 'a' 'b' 'c' @@ -6048,7 +6048,7 @@ def prod( >>> da.groupby("labels").prod(skipna=False) - array([nan, 4., 3.]) + array([nan, 4., 0.]) Coordinates: * labels (labels) object 'a' 'b' 'c' @@ -6056,7 +6056,7 @@ def prod( >>> da.groupby("labels").prod(skipna=True, min_count=2) - array([nan, 4., 3.]) + array([nan, 4., 0.]) Coordinates: * labels (labels) object 'a' 'b' 'c' """ @@ -6150,7 +6150,7 @@ def sum( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -6159,14 +6159,14 @@ def sum( ... ) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 
2001-06-30 labels (time) >> da.groupby("labels").sum() - array([1., 4., 4.]) + array([1., 4., 3.]) Coordinates: * labels (labels) object 'a' 'b' 'c' @@ -6174,7 +6174,7 @@ def sum( >>> da.groupby("labels").sum(skipna=False) - array([nan, 4., 4.]) + array([nan, 4., 3.]) Coordinates: * labels (labels) object 'a' 'b' 'c' @@ -6182,7 +6182,7 @@ def sum( >>> da.groupby("labels").sum(skipna=True, min_count=2) - array([nan, 4., 4.]) + array([nan, 4., 3.]) Coordinates: * labels (labels) object 'a' 'b' 'c' """ @@ -6273,7 +6273,7 @@ def std( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -6282,14 +6282,14 @@ def std( ... ) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.groupby("labels").std() - array([0., 0., 1.]) + array([0. , 0. , 1.5]) Coordinates: * labels (labels) object 'a' 'b' 'c' @@ -6297,7 +6297,7 @@ def std( >>> da.groupby("labels").std(skipna=False) - array([nan, 0., 1.]) + array([nan, 0. , 1.5]) Coordinates: * labels (labels) object 'a' 'b' 'c' @@ -6305,7 +6305,7 @@ def std( >>> da.groupby("labels").std(skipna=True, ddof=1) - array([ nan, 0. , 1.41421356]) + array([ nan, 0. , 2.12132034]) Coordinates: * labels (labels) object 'a' 'b' 'c' """ @@ -6396,7 +6396,7 @@ def var( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -6405,14 +6405,14 @@ def var( ... ) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.groupby("labels").var() - array([0., 0., 1.]) + array([0. , 0. , 2.25]) Coordinates: * labels (labels) object 'a' 'b' 'c' @@ -6420,7 +6420,7 @@ def var( >>> da.groupby("labels").var(skipna=False) - array([nan, 0., 1.]) + array([ nan, 0. , 2.25]) Coordinates: * labels (labels) object 'a' 'b' 'c' @@ -6428,7 +6428,7 @@ def var( >>> da.groupby("labels").var(skipna=True, ddof=1) - array([nan, 0., 2.]) + array([nan, 0. , 4.5]) Coordinates: * labels (labels) object 'a' 'b' 'c' """ @@ -6515,7 +6515,7 @@ def median( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -6524,14 +6524,14 @@ def median( ... ) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.groupby("labels").median() - array([1., 2., 2.]) + array([1. , 2. , 1.5]) Coordinates: * labels (labels) object 'a' 'b' 'c' @@ -6539,7 +6539,7 @@ def median( >>> da.groupby("labels").median(skipna=False) - array([nan, 2., 2.]) + array([nan, 2. , 1.5]) Coordinates: * labels (labels) object 'a' 'b' 'c' """ @@ -6610,7 +6610,7 @@ def cumsum( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -6619,14 +6619,14 @@ def cumsum( ... 
) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.groupby("labels").cumsum() - array([1., 2., 3., 4., 4., 1.]) + array([1., 2., 3., 3., 4., 1.]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.groupby("labels").cumsum(skipna=False) - array([ 1., 2., 3., 4., 4., nan]) + array([ 1., 2., 3., 3., 4., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -6716,14 +6716,14 @@ def cumprod( ... ) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.groupby("labels").cumprod() - array([1., 2., 3., 3., 4., 1.]) + array([1., 2., 3., 0., 4., 1.]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.groupby("labels").cumprod(skipna=False) - array([ 1., 2., 3., 3., 4., nan]) + array([ 1., 2., 3., 0., 4., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -6828,7 +6828,7 @@ def count( ... ) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -7107,7 +7107,7 @@ def max( ... ) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -7214,14 +7214,14 @@ def min( ... ) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").min() - array([1., 1., 2.]) + array([1., 0., 2.]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31 @@ -7229,7 +7229,7 @@ def min( >>> da.resample(time="3M").min(skipna=False) - array([ 1., 1., nan]) + array([ 1., 0., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31 """ @@ -7314,7 +7314,7 @@ def mean( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -7323,14 +7323,14 @@ def mean( ... ) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").mean() - array([1., 2., 2.]) + array([1. 
, 1.66666667, 2. ]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31 @@ -7338,7 +7338,7 @@ def mean( >>> da.resample(time="3M").mean(skipna=False) - array([ 1., 2., nan]) + array([1. , 1.66666667, nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31 """ @@ -7430,7 +7430,7 @@ def prod( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -7439,14 +7439,14 @@ def prod( ... ) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").prod() - array([1., 6., 2.]) + array([1., 0., 2.]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31 @@ -7454,7 +7454,7 @@ def prod( >>> da.resample(time="3M").prod(skipna=False) - array([ 1., 6., nan]) + array([ 1., 0., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31 @@ -7462,7 +7462,7 @@ def prod( >>> da.resample(time="3M").prod(skipna=True, min_count=2) - array([nan, 6., nan]) + array([nan, 0., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31 """ @@ -7556,7 +7556,7 @@ def sum( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -7565,14 +7565,14 @@ def sum( ... ) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").sum() - array([1., 6., 2.]) + array([1., 5., 2.]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31 @@ -7580,7 +7580,7 @@ def sum( >>> da.resample(time="3M").sum(skipna=False) - array([ 1., 6., nan]) + array([ 1., 5., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31 @@ -7588,7 +7588,7 @@ def sum( >>> da.resample(time="3M").sum(skipna=True, min_count=2) - array([nan, 6., nan]) + array([nan, 5., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31 """ @@ -7679,7 +7679,7 @@ def std( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -7688,14 +7688,14 @@ def std( ... ) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").std() - array([0. , 0.81649658, 0. ]) + array([0. , 1.24721913, 0. ]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31 @@ -7703,7 +7703,7 @@ def std( >>> da.resample(time="3M").std(skipna=False) - array([0. , 0.81649658, nan]) + array([0. , 1.24721913, nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31 @@ -7711,7 +7711,7 @@ def std( >>> da.resample(time="3M").std(skipna=True, ddof=1) - array([nan, 1., nan]) + array([ nan, 1.52752523, nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31 """ @@ -7802,7 +7802,7 @@ def var( Examples -------- >>> da = xr.DataArray( - ... 
np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -7811,14 +7811,14 @@ def var( ... ) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").var() - array([0. , 0.66666667, 0. ]) + array([0. , 1.55555556, 0. ]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31 @@ -7826,7 +7826,7 @@ def var( >>> da.resample(time="3M").var(skipna=False) - array([0. , 0.66666667, nan]) + array([0. , 1.55555556, nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31 @@ -7834,7 +7834,7 @@ def var( >>> da.resample(time="3M").var(skipna=True, ddof=1) - array([nan, 1., nan]) + array([ nan, 2.33333333, nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31 """ @@ -7921,7 +7921,7 @@ def median( Examples -------- >>> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -7930,7 +7930,7 @@ def median( ... ) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -8025,14 +8025,14 @@ def cumsum( ... ) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").cumsum() - array([1., 2., 5., 6., 2., 2.]) + array([1., 2., 5., 5., 2., 2.]) Coordinates: labels (time) >> da.resample(time="3M").cumsum(skipna=False) - array([ 1., 2., 5., 6., 2., nan]) + array([ 1., 2., 5., 5., 2., nan]) Coordinates: labels (time) >> da = xr.DataArray( - ... np.array([1, 2, 3, 1, 2, np.nan]), + ... np.array([1, 2, 3, 0, 2, np.nan]), ... dims="time", ... coords=dict( ... time=("time", pd.date_range("2001-01-01", freq="M", periods=6)), @@ -8122,14 +8122,14 @@ def cumprod( ... ) >>> da - array([ 1., 2., 3., 1., 2., nan]) + array([ 1., 2., 3., 0., 2., nan]) Coordinates: * time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30 labels (time) >> da.resample(time="3M").cumprod() - array([1., 2., 6., 6., 2., 2.]) + array([1., 2., 6., 0., 2., 2.]) Coordinates: labels (time) >> da.resample(time="3M").cumprod(skipna=False) - array([ 1., 2., 6., 6., 2., nan]) + array([ 1., 2., 6., 0., 2., nan]) Coordinates: labels (time)
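Finally, the resampled results: with ``time="3M"`` the bins hold {1}, {2, 3, 0} and {2, nan}, so the middle bin now exercises the 0. A sketch with the same example data; the expected values in the comments are taken from the doctest outputs in this patch.

import numpy as np
import pandas as pd
import xarray as xr

da = xr.DataArray(
    np.array([1, 2, 3, 0, 2, np.nan]),
    dims="time",
    coords=dict(
        time=("time", pd.date_range("2001-01-01", freq="M", periods=6)),
        labels=("time", np.array(["a", "b", "c", "c", "b", "a"])),
    ),
)

# Quarterly bins end 2001-01-31, 2001-04-30 and 2001-07-31.
da.resample(time="3M").min()    # [1.0, 0.0, 2.0]
da.resample(time="3M").mean()   # [1.0, 1.667, 2.0]
da.resample(time="3M").sum()    # [1.0, 5.0, 2.0]
da.resample(time="3M").prod()   # [1.0, 0.0, 2.0]
da.resample(time="3M").sum(skipna=True, min_count=2)  # [nan, 5.0, nan]
da.resample(time="3M").cumprod()  # [1, 2, 6, 0, 2, 2]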