@@ -1,3 +1,4 @@
+import collections
 from datetime import datetime, timedelta
 from io import StringIO
 import sys
@@ -246,27 +247,15 @@ def test_nunique(self, index_or_series_obj):
         assert result == len(obj.unique())

     def test_value_counts(self, index_or_series_obj):
-        orig = index_or_series_obj
-        obj = multiply_values(orig.copy())
-
-        if orig.duplicated().any():
-            # FIXME: duplicated values should work.
-            pytest.xfail(
-                "The test implementation isn't flexible enough to deal"
-                " with duplicated values. This isn't a bug in the"
-                " application code, but in the test code."
-            )
-
-        expected_index = Index(orig.values[::-1], dtype=orig.dtype)
-        if is_datetime64tz_dtype(obj):
-            expected_index = expected_index.normalize()
-        expected_s = Series(
-            range(len(orig), 0, -1), index=expected_index, dtype="int64"
-        )
-
+        obj = multiply_values(index_or_series_obj)
         result = obj.value_counts()
-        tm.assert_series_equal(result, expected_s)
-        assert result.index.name is None
+
+        counter = collections.Counter(obj)
+        expected = pd.Series(dict(counter.most_common()), dtype=int)
+        expected.index = expected.index.astype(obj.dtype)
+        if isinstance(obj, pd.MultiIndex):
+            expected.index = pd.Index(expected.index)
+        tm.assert_series_equal(result, expected)

     @pytest.mark.parametrize("null_obj", [np.nan, None])
     def test_value_counts_unique_nunique_null(self, null_obj, index_or_series_obj):
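For readers skimming the change, here is a minimal standalone sketch of the expectation logic the rewritten test relies on. It assumes a plain object-dtype Series built by hand (the multiply_values helper used by the test is defined elsewhere in the module and is not shown in this hunk), uses np.int64 explicitly because value_counts always returns int64 counts, and passes check_names=False since newer pandas versions name the result "count"; none of those details are part of the committed change itself.

import collections

import numpy as np
import pandas as pd
import pandas._testing as tm

# Stand-in data: a small Series with duplicated values, taking the place of
# the fixture objects the real test feeds through multiply_values.
obj = pd.Series(["a", "b", "a", "c", "b", "a"])

result = obj.value_counts()

# Same expectation logic as the rewritten test: count frequencies with
# collections.Counter, order them descending via most_common(), and cast the
# index back to the original dtype.
counter = collections.Counter(obj)
expected = pd.Series(dict(counter.most_common()), dtype=np.int64)
expected.index = expected.index.astype(obj.dtype)

# check_names=False because pandas 2.0+ names the value_counts result "count";
# the comparison here is only about values and ordering.
tm.assert_series_equal(result, expected, check_names=False)

Building the expected Series from collections.Counter keeps the expectation independent of pandas internals, which is what lets the test drop the earlier xfail for duplicated values.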