REF: misplaced tests, parametrize, simplify (#55870)
* REF: misplaced tests

* REF: libalgos tests

* rename standard pattern

* split tests

* simplify reset_index tests

* REF: misplaced tests

* de-duplicate test

* parametrize

* revert
jbrockmendel authored Nov 7, 2023
1 parent 7b32c17 commit f2794fd
Showing 18 changed files with 372 additions and 443 deletions.
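The recurring pattern in this commit is the one named in the title: near-duplicate test bodies are collapsed into a single pytest-parametrized test. A minimal sketch of that pattern, using a hypothetical test rather than anything from this diff:

import pandas as pd
import pytest

# Instead of one hand-written test for the tz-naive case and another for the
# tz-aware case, the timezone becomes a parametrized dimension of a single test.
@pytest.mark.parametrize("tz", [None, "US/Central"])
def test_day_offset_roundtrip(tz):
    ts = pd.Timestamp("2000-01-15 00:15:00", tz=tz)
    assert ts + pd.offsets.Day() - pd.offsets.Day() == ts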
95 changes: 19 additions & 76 deletions pandas/tests/arithmetic/test_datetime64.py
@@ -1421,8 +1421,9 @@ def test_dt64arr_add_sub_relativedelta_offsets(self, box_with_array, unit):
@pytest.mark.parametrize("normalize", [True, False])
@pytest.mark.parametrize("n", [0, 5])
@pytest.mark.parametrize("unit", ["s", "ms", "us", "ns"])
@pytest.mark.parametrize("tz", [None, "US/Central"])
def test_dt64arr_add_sub_DateOffsets(
self, box_with_array, n, normalize, cls_and_kwargs, unit
self, box_with_array, n, normalize, cls_and_kwargs, unit, tz
):
# GH#10699
# assert vectorized operation matches pointwise operations
@@ -1444,33 +1445,33 @@ def test_dt64arr_add_sub_DateOffsets(
# passing n = 0 is invalid for these offset classes
return

vec = DatetimeIndex(
[
Timestamp("2000-01-05 00:15:00"),
Timestamp("2000-01-31 00:23:00"),
Timestamp("2000-01-01"),
Timestamp("2000-03-31"),
Timestamp("2000-02-29"),
Timestamp("2000-12-31"),
Timestamp("2000-05-15"),
Timestamp("2001-06-15"),
]
).as_unit(unit)
vec = (
DatetimeIndex(
[
Timestamp("2000-01-05 00:15:00"),
Timestamp("2000-01-31 00:23:00"),
Timestamp("2000-01-01"),
Timestamp("2000-03-31"),
Timestamp("2000-02-29"),
Timestamp("2000-12-31"),
Timestamp("2000-05-15"),
Timestamp("2001-06-15"),
]
)
.as_unit(unit)
.tz_localize(tz)
)
vec = tm.box_expected(vec, box_with_array)
vec_items = vec.iloc[0] if box_with_array is pd.DataFrame else vec

offset_cls = getattr(pd.offsets, cls_name)

# pandas.errors.PerformanceWarning: Non-vectorized DateOffset being
# applied to Series or DatetimeIndex
# we aren't testing that here, so ignore.

offset = offset_cls(n, normalize=normalize, **kwargs)

# TODO(GH#55564): as_unit will be unnecessary
expected = DatetimeIndex([x + offset for x in vec_items]).as_unit(unit)
expected = tm.box_expected(expected, box_with_array)
tm.assert_equal(expected, vec + offset)
tm.assert_equal(expected, offset + vec)

expected = DatetimeIndex([x - offset for x in vec_items]).as_unit(unit)
expected = tm.box_expected(expected, box_with_array)
@@ -1483,64 +1484,6 @@ def test_dt64arr_add_sub_DateOffsets(
with pytest.raises(TypeError, match=msg):
offset - vec

def test_dt64arr_add_sub_DateOffset(self, box_with_array):
# GH#10699
s = date_range("2000-01-01", "2000-01-31", name="a")
s = tm.box_expected(s, box_with_array)
result = s + DateOffset(years=1)
result2 = DateOffset(years=1) + s
exp = date_range("2001-01-01", "2001-01-31", name="a")._with_freq(None)
exp = tm.box_expected(exp, box_with_array)
tm.assert_equal(result, exp)
tm.assert_equal(result2, exp)

result = s - DateOffset(years=1)
exp = date_range("1999-01-01", "1999-01-31", name="a")._with_freq(None)
exp = tm.box_expected(exp, box_with_array)
tm.assert_equal(result, exp)

s = DatetimeIndex(
[
Timestamp("2000-01-15 00:15:00", tz="US/Central"),
Timestamp("2000-02-15", tz="US/Central"),
],
name="a",
)
s = tm.box_expected(s, box_with_array)
result = s + pd.offsets.Day()
result2 = pd.offsets.Day() + s
exp = DatetimeIndex(
[
Timestamp("2000-01-16 00:15:00", tz="US/Central"),
Timestamp("2000-02-16", tz="US/Central"),
],
name="a",
)
exp = tm.box_expected(exp, box_with_array)
tm.assert_equal(result, exp)
tm.assert_equal(result2, exp)

s = DatetimeIndex(
[
Timestamp("2000-01-15 00:15:00", tz="US/Central"),
Timestamp("2000-02-15", tz="US/Central"),
],
name="a",
)
s = tm.box_expected(s, box_with_array)
result = s + pd.offsets.MonthEnd()
result2 = pd.offsets.MonthEnd() + s
exp = DatetimeIndex(
[
Timestamp("2000-01-31 00:15:00", tz="US/Central"),
Timestamp("2000-02-29", tz="US/Central"),
],
name="a",
)
exp = tm.box_expected(exp, box_with_array)
tm.assert_equal(result, exp)
tm.assert_equal(result2, exp)

@pytest.mark.parametrize(
"other",
[
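The deleted test_dt64arr_add_sub_DateOffset above covered tz-aware cases by hand; the new tz parametrize decorator on test_dt64arr_add_sub_DateOffsets makes that redundant, because stacked parametrize decorators expand into the full cross-product of their values. A small illustration of the stacking behaviour (hypothetical test, not part of this diff):

import pytest

@pytest.mark.parametrize("tz", [None, "US/Central"])
@pytest.mark.parametrize("unit", ["s", "ms", "us", "ns"])
def test_parametrize_cross_product(unit, tz):
    # pytest generates 4 units x 2 timezones = 8 test cases from these decorators.
    assert unit in {"s", "ms", "us", "ns"}
    assert tz in {None, "US/Central"}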
12 changes: 12 additions & 0 deletions pandas/tests/arithmetic/test_numeric.py
@@ -1393,6 +1393,18 @@ def test_addsub_arithmetic(self, dtype, delta):
tm.assert_index_equal(index - index, 0 * index)
assert not (index - index).empty

def test_pow_nan_with_zero(self, box_with_array):
left = Index([np.nan, np.nan, np.nan])
right = Index([0, 0, 0])
expected = Index([1.0, 1.0, 1.0])

left = tm.box_expected(left, box_with_array)
right = tm.box_expected(right, box_with_array)
expected = tm.box_expected(expected, box_with_array)

result = left**right
tm.assert_equal(result, expected)


def test_fill_value_inf_masking():
# GH #27464 make sure we mask 0/1 with Inf and not NaN
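The relocated test_pow_nan_with_zero asserts the IEEE 754 convention that NaN raised to the power zero is 1, which pandas inherits from NumPy and plain Python floats; a quick standalone check of that behaviour:

import numpy as np

# pow(x, 0) is 1.0 even when x is NaN, in both Python and NumPy.
assert float("nan") ** 0 == 1.0
assert (np.array([np.nan, np.nan, np.nan]) ** np.array([0, 0, 0]) == 1.0).all()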
1 change: 0 additions & 1 deletion pandas/tests/arrays/integer/test_dtypes.py
@@ -23,7 +23,6 @@ def test_dtypes(dtype):

@pytest.mark.parametrize("op", ["sum", "min", "max", "prod"])
def test_preserve_dtypes(op):
# TODO(#22346): preserve Int64 dtype
# for ops that enable (mean would actually work here
# but generally it is a float return value)
df = pd.DataFrame(
13 changes: 13 additions & 0 deletions pandas/tests/arrays/interval/test_formats.py
@@ -0,0 +1,13 @@
from pandas.core.arrays import IntervalArray


def test_repr():
# GH#25022
arr = IntervalArray.from_tuples([(0, 1), (1, 2)])
result = repr(arr)
expected = (
"<IntervalArray>\n"
"[(0, 1], (1, 2]]\n"
"Length: 2, dtype: interval[int64, right]"
)
assert result == expected
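For context on the expected string: IntervalArray.from_tuples defaults to closed="right", which is why the intervals print as right-closed and the closed side appears in the dtype. A quick sketch showing how the repr changes with the closed argument (the output described in the comment is approximate):

from pandas.core.arrays import IntervalArray

# With closed="left", the intervals render in [0, 1) style and the dtype
# string ends in "left" instead of "right".
left_closed = IntervalArray.from_tuples([(0, 1), (1, 2)], closed="left")
print(repr(left_closed))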
12 changes: 0 additions & 12 deletions pandas/tests/arrays/interval/test_interval.py
@@ -166,18 +166,6 @@ def test_setitem_mismatched_closed(self):
tm.assert_interval_array_equal(arr, orig)


def test_repr():
# GH 25022
arr = IntervalArray.from_tuples([(0, 1), (1, 2)])
result = repr(arr)
expected = (
"<IntervalArray>\n"
"[(0, 1], (1, 2]]\n"
"Length: 2, dtype: interval[int64, right]"
)
assert result == expected


class TestReductions:
def test_min_max_invalid_axis(self, left_right_dtypes):
left, right = left_right_dtypes
File renamed without changes.
54 changes: 16 additions & 38 deletions pandas/tests/frame/methods/test_reset_index.py
@@ -82,7 +82,8 @@ def test_reset_index_tz(self, tz_aware_fixture):
},
columns=["idx", "a", "b"],
)
tm.assert_frame_equal(df.reset_index(), expected)
result = df.reset_index()
tm.assert_frame_equal(result, expected)

@pytest.mark.parametrize("tz", ["US/Eastern", "dateutil/US/Eastern"])
def test_frame_reset_index_tzaware_index(self, tz):
@@ -487,23 +488,20 @@ def test_reset_index_datetime(self, tz_naive_fixture):

expected = DataFrame(
{
"idx1": [
datetime(2011, 1, 1),
datetime(2011, 1, 2),
datetime(2011, 1, 3),
datetime(2011, 1, 4),
datetime(2011, 1, 5),
],
"idx1": idx1,
"idx2": np.arange(5, dtype="int64"),
"a": np.arange(5, dtype="int64"),
"b": ["A", "B", "C", "D", "E"],
},
columns=["idx1", "idx2", "a", "b"],
)
expected["idx1"] = expected["idx1"].apply(lambda d: Timestamp(d, tz=tz))

tm.assert_frame_equal(df.reset_index(), expected)

def test_reset_index_datetime2(self, tz_naive_fixture):
tz = tz_naive_fixture
idx1 = date_range("1/1/2011", periods=5, freq="D", tz=tz, name="idx1")
idx2 = Index(range(5), name="idx2", dtype="int64")
idx3 = date_range(
"1/1/2012", periods=5, freq="MS", tz="Europe/Paris", name="idx3"
)
@@ -515,54 +513,34 @@ def test_reset_index_datetime(self, tz_naive_fixture):

expected = DataFrame(
{
"idx1": [
datetime(2011, 1, 1),
datetime(2011, 1, 2),
datetime(2011, 1, 3),
datetime(2011, 1, 4),
datetime(2011, 1, 5),
],
"idx1": idx1,
"idx2": np.arange(5, dtype="int64"),
"idx3": [
datetime(2012, 1, 1),
datetime(2012, 2, 1),
datetime(2012, 3, 1),
datetime(2012, 4, 1),
datetime(2012, 5, 1),
],
"idx3": idx3,
"a": np.arange(5, dtype="int64"),
"b": ["A", "B", "C", "D", "E"],
},
columns=["idx1", "idx2", "idx3", "a", "b"],
)
expected["idx1"] = expected["idx1"].apply(lambda d: Timestamp(d, tz=tz))
expected["idx3"] = expected["idx3"].apply(
lambda d: Timestamp(d, tz="Europe/Paris")
)
tm.assert_frame_equal(df.reset_index(), expected)
result = df.reset_index()
tm.assert_frame_equal(result, expected)

def test_reset_index_datetime3(self, tz_naive_fixture):
# GH#7793
idx = MultiIndex.from_product(
[["a", "b"], date_range("20130101", periods=3, tz=tz)]
)
tz = tz_naive_fixture
dti = date_range("20130101", periods=3, tz=tz)
idx = MultiIndex.from_product([["a", "b"], dti])
df = DataFrame(
np.arange(6, dtype="int64").reshape(6, 1), columns=["a"], index=idx
)

expected = DataFrame(
{
"level_0": "a a a b b b".split(),
"level_1": [
datetime(2013, 1, 1),
datetime(2013, 1, 2),
datetime(2013, 1, 3),
]
* 2,
"level_1": dti.append(dti),
"a": np.arange(6, dtype="int64"),
},
columns=["level_0", "level_1", "a"],
)
expected["level_1"] = expected["level_1"].apply(lambda d: Timestamp(d, tz=tz))
result = df.reset_index()
tm.assert_frame_equal(result, expected)

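The simplified expected frames above rely on a tz-aware DatetimeIndex being usable directly as a column, in place of the removed naive-datetime-plus-apply construction. A rough sketch of that equivalence, with a fixed timezone standing in for the tz_naive_fixture parameter, assuming the pandas version this commit targets:

import pandas as pd
from datetime import datetime

tz = "US/Eastern"  # stand-in for the tz_naive_fixture value
idx1 = pd.date_range("1/1/2011", periods=3, freq="D", tz=tz, name="idx1")

# Old pattern removed by the commit: naive datetimes localized element-wise.
old = pd.Series(
    [datetime(2011, 1, 1), datetime(2011, 1, 2), datetime(2011, 1, 3)]
).apply(lambda d: pd.Timestamp(d, tz=tz))

# New pattern: reuse the tz-aware index directly when building the expected frame.
new = pd.Series(idx1)

pd.testing.assert_series_equal(old, new, check_names=False)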
14 changes: 0 additions & 14 deletions pandas/tests/frame/test_arithmetic.py
@@ -1904,20 +1904,6 @@ def test_pow_with_realignment():
tm.assert_frame_equal(result, expected)


# TODO: move to tests.arithmetic and parametrize
def test_pow_nan_with_zero():
left = DataFrame({"A": [np.nan, np.nan, np.nan]})
right = DataFrame({"A": [0, 0, 0]})

expected = DataFrame({"A": [1.0, 1.0, 1.0]})

result = left**right
tm.assert_frame_equal(result, expected)

result = left["A"] ** right["A"]
tm.assert_series_equal(result, expected["A"])


def test_dataframe_series_extension_dtypes():
# https://github.com/pandas-dev/pandas/issues/34311
df = DataFrame(
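Moving test_pow_nan_with_zero into tests.arithmetic lets it pick up the box_with_array fixture, so the Index, Series, and DataFrame variants come from a single parametrized test instead of the hand-written DataFrame-only version removed here. A simplified stand-in for that fixture-driven pattern (the test name and the reduced box list are illustrative, not the actual fixture):

import numpy as np
import pandas as pd
import pandas._testing as tm
import pytest

# A stripped-down stand-in for pandas' box_with_array fixture: run the same
# assertion against several container types.
@pytest.mark.parametrize("box", [pd.Index, pd.Series, pd.DataFrame])
def test_pow_nan_with_zero_sketch(box):
    left = box([np.nan, np.nan, np.nan])
    right = box([0, 0, 0])
    expected = box([1.0, 1.0, 1.0])
    tm.assert_equal(left**right, expected)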