|
24 | 24 | from db_dtypes import pandas_backports
25 | 25 |
26 | 26 |
| 27 | +VALUE_PARSING_TEST_CASES = [ |
| 28 | +    # Min/Max values for pandas.Timestamp. |
| 29 | +    ("1677-09-22", datetime.date(1677, 9, 22)), |
| 30 | +    ("2262-04-11", datetime.date(2262, 4, 11)), |
| 31 | +    # Typical "zero" values. |
| 32 | +    ("1900-01-01", datetime.date(1900, 1, 1)), |
| 33 | +    ("1970-01-01", datetime.date(1970, 1, 1)), |
| 34 | +    # Assorted values. |
| 35 | +    ("1993-10-31", datetime.date(1993, 10, 31)), |
| 36 | +    (datetime.date(1993, 10, 31), datetime.date(1993, 10, 31)), |
| 37 | +    ("2012-02-29", datetime.date(2012, 2, 29)), |
| 38 | +    (numpy.datetime64("2012-02-29"), datetime.date(2012, 2, 29)), |
| 39 | +    ("2021-12-17", datetime.date(2021, 12, 17)), |
| 40 | +    (pandas.Timestamp("2021-12-17"), datetime.date(2021, 12, 17)), |
| 41 | +    ("2038-01-19", datetime.date(2038, 1, 19)), |
| 42 | +] |
| 43 | + |
| 44 | +NULL_VALUE_TEST_CASES = [ |
| 45 | +    None, |
| 46 | +    pandas.NaT, |
| 47 | +    float("nan"), |
| 48 | +] |
| 49 | + |
| 50 | +if hasattr(pandas, "NA"): |
| 51 | +    NULL_VALUE_TEST_CASES.append(pandas.NA) |
| 52 | + |
| 53 | + |
27 | 54 | def test_box_func():
28 | 55 |     input_array = db_dtypes.DateArray([])
29 | 56 |     input_datetime = datetime.datetime(2022, 3, 16)
@@ -58,26 +85,49 @@ def test__cmp_method_with_scalar():
58 | 85 |     assert got[0]
59 | 86 |
60 | 87 |
61 | | -@pytest.mark.parametrize( |
62 | | -    "value, expected", |
63 | | -    [ |
64 | | -        # Min/Max values for pandas.Timestamp. |
65 | | -        ("1677-09-22", datetime.date(1677, 9, 22)), |
66 | | -        ("2262-04-11", datetime.date(2262, 4, 11)), |
67 | | -        # Typical "zero" values. |
68 | | -        ("1900-01-01", datetime.date(1900, 1, 1)), |
69 | | -        ("1970-01-01", datetime.date(1970, 1, 1)), |
70 | | -        # Assorted values. |
71 | | -        ("1993-10-31", datetime.date(1993, 10, 31)), |
72 | | -        ("2012-02-29", datetime.date(2012, 2, 29)), |
73 | | -        ("2021-12-17", datetime.date(2021, 12, 17)), |
74 | | -        ("2038-01-19", datetime.date(2038, 1, 19)), |
75 | | -    ], |
76 | | -) |
| 88 | +@pytest.mark.parametrize("value, expected", VALUE_PARSING_TEST_CASES) |
77 | 89 | def test_date_parsing(value, expected):
78 | 90 |     assert pandas.Series([value], dtype="dbdate")[0] == expected
79 | 91 |
80 | 92 |
| 93 | +@pytest.mark.parametrize("value", NULL_VALUE_TEST_CASES) |
| 94 | +def test_date_parsing_null(value): |
| 95 | +    assert pandas.Series([value], dtype="dbdate")[0] is pandas.NaT |
| 96 | + |
| 97 | + |
| 98 | +@pytest.mark.parametrize("value, expected", VALUE_PARSING_TEST_CASES) |
| 99 | +def test_date_set_item(value, expected): |
| 100 | +    series = pandas.Series([None], dtype="dbdate") |
| 101 | +    series[0] = value |
| 102 | +    assert series[0] == expected |
| 103 | + |
| 104 | + |
| 105 | +@pytest.mark.parametrize("value", NULL_VALUE_TEST_CASES) |
| 106 | +def test_date_set_item_null(value): |
| 107 | +    series = pandas.Series(["1970-01-01"], dtype="dbdate") |
| 108 | +    series[0] = value |
| 109 | +    assert series[0] is pandas.NaT |
| 110 | + |
| 111 | + |
| 112 | +def test_date_set_slice(): |
| 113 | +    series = pandas.Series([None, None, None], dtype="dbdate") |
| 114 | +    series[:] = [ |
| 115 | +        datetime.date(2022, 3, 21), |
| 116 | +        "2011-12-13", |
| 117 | +        numpy.datetime64("1998-09-04"), |
| 118 | +    ] |
| 119 | +    assert series[0] == datetime.date(2022, 3, 21) |
| 120 | +    assert series[1] == datetime.date(2011, 12, 13) |
| 121 | +    assert series[2] == datetime.date(1998, 9, 4) |
| 122 | + |
| 123 | + |
| 124 | +def test_date_set_slice_null(): |
| 125 | +    series = pandas.Series(["1970-01-01"] * len(NULL_VALUE_TEST_CASES), dtype="dbdate") |
| 126 | +    series[:] = NULL_VALUE_TEST_CASES |
| 127 | +    for row_index in range(len(NULL_VALUE_TEST_CASES)): |
| 128 | +        assert series[row_index] is pandas.NaT |
| 129 | + |
| 130 | + |
81 | 131 | @pytest.mark.parametrize(
82 | 132 |     "value, error",
83 | 133 |     [
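For context, a minimal standalone sketch of the "dbdate" behavior the new tests exercise, assuming pandas, numpy, and the db-dtypes package are installed (importing db_dtypes registers the extension dtype with pandas); this mirrors the assertions above and is not part of the diff itself:

# Importing db_dtypes registers the "dbdate" extension dtype with pandas.
import datetime

import pandas

import db_dtypes  # noqa: F401

# Parsing: ISO date strings (and datetime.date, numpy.datetime64, and
# pandas.Timestamp values) coerce to datetime.date.
series = pandas.Series(["2021-12-17", None], dtype="dbdate")
assert series[0] == datetime.date(2021, 12, 17)

# Missing values (None, NaN, pandas.NaT, pandas.NA) come back as pandas.NaT.
assert series[1] is pandas.NaT

# Item assignment uses the same coercion rules as construction.
series[0] = "1998-09-04"
assert series[0] == datetime.date(1998, 9, 4)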