Skip to content
1 change: 1 addition & 0 deletions doc/source/whatsnew/v3.0.0.rst
Original file line number Diff line number Diff line change
Expand Up @@ -324,6 +324,7 @@ Bug fixes
- Fixed bug in :meth:`Series.diff` allowing non-integer values for the ``periods`` argument. (:issue:`56607`)
- Fixed bug in :meth:`Series.rank` that doesn't preserve missing values for nullable integers when ``na_option='keep'``. (:issue:`56976`)
- Fixed bug in :meth:`Series.replace` and :meth:`DataFrame.replace` inconsistently replacing matching instances when ``regex=True`` and missing values are present. (:issue:`56599`)
- Fixed bug in :meth:`Series.rolling.kurt` where small arrays with low variance were zeroed out even when the computation was numerically stable (:issue:`57972`)

Categorical
^^^^^^^^^^^
Expand Down
3 changes: 2 additions & 1 deletion pandas/_libs/window/aggregations.pyx
Original file line number Diff line number Diff line change
Expand Up @@ -712,7 +712,8 @@ cdef float64_t calc_kurt(int64_t minp, int64_t nobs,
# if the variance is less than 1e-14, it could be
# treat as zero, here we follow the original
# skew/kurt behaviour to check B <= 1e-14
if B <= 1e-14:
# #57972: for small arrays the cutoff can be lowered
if B <= 1e-14 and nobs > 100 or B <= 1e-16:
result = NaN
else:
K = (dnobs * dnobs - 1.) * D / (B * B) - 3 * ((dnobs - 1.) ** 2)
Expand Down
16 changes: 10 additions & 6 deletions pandas/core/nanops.py
Original file line number Diff line number Diff line change
Expand Up @@ -1357,9 +1357,13 @@ def nankurt(
# floating point error
#
# #18044 in _libs/windows.pyx calc_kurt follow this behavior
# to fix the fperr to treat denom <1e-14 as zero
numerator = _zero_out_fperr(numerator)
denominator = _zero_out_fperr(denominator)
# to fix the fperr to treat denom <1e-14 as zero (default cutoff)
# GH-57972 set cutoff lower for small arrays to prevent cutoff of otherwise
# numerically stable values
length = count[0] if isinstance(count, np.ndarray) else count
cutoff = 1e-14 if length > 100 else 1e-16
numerator = _zero_out_fperr(numerator, cutoff)
denominator = _zero_out_fperr(denominator, cutoff)

if not isinstance(denominator, np.ndarray):
# if ``denom`` is a scalar, check these corner cases first before
Expand Down Expand Up @@ -1576,12 +1580,12 @@ def check_below_min_count(
return False


def _zero_out_fperr(arg):
def _zero_out_fperr(arg, cutoff=1e-14):
# #18044 reference this behavior to fix rolling skew/kurt issue
if isinstance(arg, np.ndarray):
return np.where(np.abs(arg) < 1e-14, 0, arg)
return np.where(np.abs(arg) < cutoff, 0, arg)
else:
return arg.dtype.type(0) if np.abs(arg) < 1e-14 else arg
return arg.dtype.type(0) if np.abs(arg) < cutoff else arg


@disallow("M8", "m8")
Expand Down
12 changes: 12 additions & 0 deletions pandas/tests/test_nanops.py
Original file line number Diff line number Diff line change
Expand Up @@ -1105,6 +1105,18 @@ def test_nans_skipna(self, samples, actual_kurt):
kurt = nanops.nankurt(samples, skipna=True)
tm.assert_almost_equal(kurt, actual_kurt)

def test_small_arrays_with_low_variance(self):
    # GH-57972
    # For small samples with low variance, numerical stability breaks down
    # at a lower threshold, so nankurt must not zero out these values.
    low_var_samples = np.zeros(29)
    low_var_samples[0] = -2.05191341e-05
    low_var_samples[5] = -4.10391103e-05
    # reference value computed with scipy.stats.kurtosis(low_var_samples, bias=False)
    expected_kurt = 18.087646853025614
    result = nanops.nankurt(low_var_samples)
    tm.assert_almost_equal(result, expected_kurt)

@property
def prng(self):
return np.random.default_rng(2)
Expand Down