diff --git a/doc/source/whatsnew/v2.2.0.rst b/doc/source/whatsnew/v2.2.0.rst index c09c7d509ca47..d9909b0dbfad8 100644 --- a/doc/source/whatsnew/v2.2.0.rst +++ b/doc/source/whatsnew/v2.2.0.rst @@ -330,6 +330,7 @@ Datetimelike - Bug in addition or subtraction of :class:`BusinessDay` offset with ``offset`` attribute to non-nanosecond :class:`Index`, :class:`Series`, or :class:`DataFrame` column giving incorrect results (:issue:`55608`) - Bug in addition or subtraction of :class:`DateOffset` objects with microsecond components to ``datetime64`` :class:`Index`, :class:`Series`, or :class:`DataFrame` columns with non-nanosecond resolution (:issue:`55595`) - Bug in addition or subtraction of very large :class:`Tick` objects with :class:`Timestamp` or :class:`Timedelta` objects raising ``OverflowError`` instead of ``OutOfBoundsTimedelta`` (:issue:`55503`) +- Bug in creating a :class:`Index`, :class:`Series`, or :class:`DataFrame` with a non-nanosecond ``datetime64`` dtype and inputs that would be out of bounds for a ``datetime64[ns]`` incorrectly raising ``OutOfBoundsDatetime`` (:issue:`55756`) - Timedelta diff --git a/pandas/_libs/tslib.pyi b/pandas/_libs/tslib.pyi index 9819b5173db56..7f95bfc717633 100644 --- a/pandas/_libs/tslib.pyi +++ b/pandas/_libs/tslib.pyi @@ -23,6 +23,7 @@ def array_to_datetime( dayfirst: bool = ..., yearfirst: bool = ..., utc: bool = ..., + creso: int = ..., ) -> tuple[np.ndarray, tzinfo | None]: ... # returned ndarray may be object dtype or datetime64[ns] diff --git a/pandas/_libs/tslib.pyx b/pandas/_libs/tslib.pyx index 576ac3f5dcba8..d2eeea78ee7e8 100644 --- a/pandas/_libs/tslib.pyx +++ b/pandas/_libs/tslib.pyx @@ -64,6 +64,7 @@ from pandas._libs.tslibs.conversion cimport ( get_datetime64_nanos, parse_pydatetime, ) +from pandas._libs.tslibs.dtypes cimport npy_unit_to_abbrev from pandas._libs.tslibs.nattype cimport ( NPY_NAT, c_NaT as NaT, @@ -277,6 +278,7 @@ def array_with_unit_to_datetime( result, tz = array_to_datetime( values.astype(object, copy=False), errors=errors, + creso=NPY_FR_ns, ) return result, tz @@ -408,6 +410,7 @@ cpdef array_to_datetime( bint dayfirst=False, bint yearfirst=False, bint utc=False, + NPY_DATETIMEUNIT creso=NPY_FR_ns, ): """ Converts a 1D array of date-like values to a numpy array of either: @@ -434,6 +437,7 @@ cpdef array_to_datetime( yearfirst parsing behavior when encountering datetime strings utc : bool, default False indicator whether the dates should be UTC + creso : NPY_DATETIMEUNIT, default NPY_FR_ns Returns ------- @@ -457,13 +461,14 @@ cpdef array_to_datetime( set out_tzoffset_vals = set() tzinfo tz_out = None cnp.flatiter it = cnp.PyArray_IterNew(values) - NPY_DATETIMEUNIT creso = NPY_FR_ns DatetimeParseState state = DatetimeParseState() + str reso_str # specify error conditions assert is_raise or is_ignore or is_coerce - result = np.empty((values).shape, dtype="M8[ns]") + reso_str = npy_unit_to_abbrev(creso) + result = np.empty((values).shape, dtype=f"M8[{reso_str}]") iresult = result.view("i8").ravel() for i in range(n): @@ -480,11 +485,11 @@ cpdef array_to_datetime( iresult[i] = parse_pydatetime(val, &dts, creso=creso) elif PyDate_Check(val): - iresult[i] = pydate_to_dt64(val, &dts) - check_dts_bounds(&dts) + iresult[i] = pydate_to_dt64(val, &dts, reso=creso) + check_dts_bounds(&dts, creso) elif is_datetime64_object(val): - iresult[i] = get_datetime64_nanos(val, NPY_FR_ns) + iresult[i] = get_datetime64_nanos(val, creso) elif is_integer_object(val) or is_float_object(val): # these must be ns unit by-definition @@ -493,7 
+498,7 @@ cpdef array_to_datetime( iresult[i] = NPY_NAT else: # we now need to parse this as if unit='ns' - iresult[i] = cast_from_unit(val, "ns") + iresult[i] = cast_from_unit(val, "ns", out_reso=creso) elif isinstance(val, str): # string @@ -501,7 +506,7 @@ cpdef array_to_datetime( # GH#32264 np.str_ object val = str(val) - if parse_today_now(val, &iresult[i], utc): + if parse_today_now(val, &iresult[i], utc, creso): # We can't _quite_ dispatch this to convert_str_to_tsobject # bc there isn't a nice way to pass "utc" continue @@ -509,7 +514,7 @@ cpdef array_to_datetime( _ts = convert_str_to_tsobject( val, None, unit="ns", dayfirst=dayfirst, yearfirst=yearfirst ) - _ts.ensure_reso(NPY_FR_ns, val) + _ts.ensure_reso(creso, val) iresult[i] = _ts.value diff --git a/pandas/_libs/tslibs/conversion.pxd b/pandas/_libs/tslibs/conversion.pxd index 6ca84f060a0c4..ec743dbf98f6b 100644 --- a/pandas/_libs/tslibs/conversion.pxd +++ b/pandas/_libs/tslibs/conversion.pxd @@ -43,7 +43,9 @@ cdef int64_t get_datetime64_nanos(object val, NPY_DATETIMEUNIT reso) except? -1 cpdef datetime localize_pydatetime(datetime dt, tzinfo tz) cdef int64_t cast_from_unit(object ts, str unit, NPY_DATETIMEUNIT out_reso=*) except? -1 -cpdef (int64_t, int) precision_from_unit(str unit, NPY_DATETIMEUNIT out_reso=*) +cpdef (int64_t, int) precision_from_unit( + NPY_DATETIMEUNIT in_reso, NPY_DATETIMEUNIT out_reso=* +) cdef maybe_localize_tso(_TSObject obj, tzinfo tz, NPY_DATETIMEUNIT reso) diff --git a/pandas/_libs/tslibs/conversion.pyi b/pandas/_libs/tslibs/conversion.pyi index d564d767f7f05..be75c2f2f68ea 100644 --- a/pandas/_libs/tslibs/conversion.pyi +++ b/pandas/_libs/tslibs/conversion.pyi @@ -9,6 +9,6 @@ DT64NS_DTYPE: np.dtype TD64NS_DTYPE: np.dtype def precision_from_unit( - unit: str, + in_reso: int, # NPY_DATETIMEUNIT ) -> tuple[int, int]: ... # (int64_t, _) def localize_pydatetime(dt: datetime, tz: tzinfo | None) -> datetime: ... diff --git a/pandas/_libs/tslibs/conversion.pyx b/pandas/_libs/tslibs/conversion.pyx index 128eec66230f2..dc1cc906c9d07 100644 --- a/pandas/_libs/tslibs/conversion.pyx +++ b/pandas/_libs/tslibs/conversion.pyx @@ -106,6 +106,7 @@ cdef int64_t cast_from_unit( cdef: int64_t m int p + NPY_DATETIMEUNIT in_reso if unit in ["Y", "M"]: if is_float_object(ts) and not ts.is_integer(): @@ -123,7 +124,14 @@ cdef int64_t cast_from_unit( dt64obj = np.datetime64(ts, unit) return get_datetime64_nanos(dt64obj, out_reso) - m, p = precision_from_unit(unit, out_reso) + in_reso = abbrev_to_npy_unit(unit) + if out_reso < in_reso and in_reso != NPY_DATETIMEUNIT.NPY_FR_GENERIC: + # We will end up rounding (always *down*), so don't need the fractional + # part of `ts`. 
+ m, _ = precision_from_unit(out_reso, in_reso) + return (ts) // m + + m, p = precision_from_unit(in_reso, out_reso) # cast the unit, multiply base/frac separately # to avoid precision issues from float -> int @@ -146,8 +154,8 @@ cdef int64_t cast_from_unit( ) from err -cpdef inline (int64_t, int) precision_from_unit( - str unit, +cpdef (int64_t, int) precision_from_unit( + NPY_DATETIMEUNIT in_reso, NPY_DATETIMEUNIT out_reso=NPY_DATETIMEUNIT.NPY_FR_ns, ): """ @@ -163,17 +171,16 @@ cpdef inline (int64_t, int) precision_from_unit( int64_t m int64_t multiplier int p - NPY_DATETIMEUNIT reso = abbrev_to_npy_unit(unit) - if reso == NPY_DATETIMEUNIT.NPY_FR_GENERIC: - reso = NPY_DATETIMEUNIT.NPY_FR_ns - if reso == NPY_DATETIMEUNIT.NPY_FR_Y: + if in_reso == NPY_DATETIMEUNIT.NPY_FR_GENERIC: + in_reso = NPY_DATETIMEUNIT.NPY_FR_ns + if in_reso == NPY_DATETIMEUNIT.NPY_FR_Y: # each 400 years we have 97 leap years, for an average of 97/400=.2425 # extra days each year. We get 31556952 by writing # 3600*24*365.2425=31556952 multiplier = periods_per_second(out_reso) m = multiplier * 31556952 - elif reso == NPY_DATETIMEUNIT.NPY_FR_M: + elif in_reso == NPY_DATETIMEUNIT.NPY_FR_M: # 2629746 comes from dividing the "Y" case by 12. multiplier = periods_per_second(out_reso) m = multiplier * 2629746 @@ -181,7 +188,7 @@ cpdef inline (int64_t, int) precision_from_unit( # Careful: if get_conversion_factor raises, the exception does # not propagate, instead we get a warning about an ignored exception. # https://github.com/pandas-dev/pandas/pull/51483#discussion_r1115198951 - m = get_conversion_factor(reso, out_reso) + m = get_conversion_factor(in_reso, out_reso) p = log10(m) # number of digits in 'm' minus 1 return m, p diff --git a/pandas/_libs/tslibs/strptime.pxd b/pandas/_libs/tslibs/strptime.pxd index d09612f4fbf7d..32a8edc9ee4a3 100644 --- a/pandas/_libs/tslibs/strptime.pxd +++ b/pandas/_libs/tslibs/strptime.pxd @@ -4,8 +4,10 @@ from cpython.datetime cimport ( ) from numpy cimport int64_t +from pandas._libs.tslibs.np_datetime cimport NPY_DATETIMEUNIT -cdef bint parse_today_now(str val, int64_t* iresult, bint utc) + +cdef bint parse_today_now(str val, int64_t* iresult, bint utc, NPY_DATETIMEUNIT creso) cdef class DatetimeParseState: diff --git a/pandas/_libs/tslibs/strptime.pyx b/pandas/_libs/tslibs/strptime.pyx index 30ce01d2930c1..69466511a67fd 100644 --- a/pandas/_libs/tslibs/strptime.pyx +++ b/pandas/_libs/tslibs/strptime.pyx @@ -111,22 +111,27 @@ def _test_format_is_iso(f: str) -> bool: return format_is_iso(f) -cdef bint parse_today_now(str val, int64_t* iresult, bint utc): +cdef bint parse_today_now( + str val, int64_t* iresult, bint utc, NPY_DATETIMEUNIT creso +): # We delay this check for as long as possible # because it catches relatively rare cases + cdef: + _Timestamp ts - # Multiply by 1000 to convert to nanos, since these methods naturally have - # microsecond resolution if val == "now": if utc: - iresult[0] = Timestamp.utcnow()._value * 1000 + ts = <_Timestamp>Timestamp.utcnow() + iresult[0] = ts._as_creso(creso)._value else: # GH#18705 make sure to_datetime("now") matches Timestamp("now") # Note using Timestamp.now() is faster than Timestamp("now") - iresult[0] = Timestamp.now()._value * 1000 + ts = <_Timestamp>Timestamp.now() + iresult[0] = ts._as_creso(creso)._value return True elif val == "today": - iresult[0] = Timestamp.today()._value * 1000 + ts = <_Timestamp>Timestamp.today() + iresult[0] = ts._as_creso(creso)._value return True return False @@ -363,7 +368,7 @@ def array_strptime( 
check_dts_bounds(&dts) continue - if parse_today_now(val, &iresult[i], utc): + if parse_today_now(val, &iresult[i], utc, NPY_FR_ns): continue # Some ISO formats can't be parsed by string_to_dts diff --git a/pandas/_libs/tslibs/timedeltas.pyx b/pandas/_libs/tslibs/timedeltas.pyx index a423137c68123..e67c0fd91cd6f 100644 --- a/pandas/_libs/tslibs/timedeltas.pyx +++ b/pandas/_libs/tslibs/timedeltas.pyx @@ -303,18 +303,16 @@ cdef object ensure_td64ns(object ts): cdef: NPY_DATETIMEUNIT td64_unit int64_t td64_value, mult - str unitstr td64_unit = get_datetime64_unit(ts) if ( td64_unit != NPY_DATETIMEUNIT.NPY_FR_ns and td64_unit != NPY_DATETIMEUNIT.NPY_FR_GENERIC ): - unitstr = npy_unit_to_abbrev(td64_unit) td64_value = cnp.get_timedelta64_value(ts) - mult = precision_from_unit(unitstr)[0] + mult = precision_from_unit(td64_unit)[0] try: # NB: cython#1381 this cannot be *= td64_value = td64_value * mult diff --git a/pandas/core/arrays/datetimes.py b/pandas/core/arrays/datetimes.py index eb7a38df3fee9..3fff5cb2aa0c7 100644 --- a/pandas/core/arrays/datetimes.py +++ b/pandas/core/arrays/datetimes.py @@ -2251,6 +2251,7 @@ def _sequence_to_dt64ns( dayfirst=dayfirst, yearfirst=yearfirst, allow_object=False, + out_unit=out_unit or "ns", ) copy = False if tz and inferred_tz: @@ -2258,11 +2259,11 @@ def _sequence_to_dt64ns( assert converted.dtype == "i8" # GH#42505 # by convention, these are _already_ UTC, e.g - result = converted.view(DT64NS_DTYPE) + result = converted.view(out_dtype) elif inferred_tz: tz = inferred_tz - result = converted.view(DT64NS_DTYPE) + result = converted.view(out_dtype) else: result, _ = _construct_from_dt64_naive( @@ -2360,6 +2361,7 @@ def objects_to_datetime64ns( utc: bool = False, errors: DateTimeErrorChoices = "raise", allow_object: bool = False, + out_unit: str = "ns", ): """ Convert data to array of timestamps. @@ -2375,6 +2377,7 @@ def objects_to_datetime64ns( allow_object : bool Whether to return an object-dtype ndarray instead of raising if the data contains more than one timezone. 
+ out_unit : str, default "ns" Returns ------- @@ -2399,6 +2402,7 @@ def objects_to_datetime64ns( utc=utc, dayfirst=dayfirst, yearfirst=yearfirst, + creso=abbrev_to_npy_unit(out_unit), ) if tz_parsed is not None: diff --git a/pandas/core/arrays/timedeltas.py b/pandas/core/arrays/timedeltas.py index 128f1c73062c0..079ac3562c3d9 100644 --- a/pandas/core/arrays/timedeltas.py +++ b/pandas/core/arrays/timedeltas.py @@ -29,6 +29,7 @@ to_offset, ) from pandas._libs.tslibs.conversion import precision_from_unit +from pandas._libs.tslibs.dtypes import abbrev_to_npy_unit from pandas._libs.tslibs.fields import ( get_timedelta_days, get_timedelta_field, @@ -1078,7 +1079,7 @@ def sequence_to_td64ns( else: mask = np.isnan(data) # The next few lines are effectively a vectorized 'cast_from_unit' - m, p = precision_from_unit(unit or "ns") + m, p = precision_from_unit(abbrev_to_npy_unit(unit or "ns")) with warnings.catch_warnings(): # Suppress RuntimeWarning about All-NaN slice warnings.filterwarnings( diff --git a/pandas/core/dtypes/astype.py b/pandas/core/dtypes/astype.py index ac3a44276ac6d..f5579082c679b 100644 --- a/pandas/core/dtypes/astype.py +++ b/pandas/core/dtypes/astype.py @@ -105,11 +105,10 @@ def _astype_nansafe( # then coerce to datetime64[ns] and use DatetimeArray.astype if lib.is_np_dtype(dtype, "M"): - from pandas import to_datetime + from pandas.core.arrays import DatetimeArray - dti = to_datetime(arr.ravel()) - dta = dti._data.reshape(arr.shape) - return dta.astype(dtype, copy=False)._ndarray + dta = DatetimeArray._from_sequence(arr, dtype=dtype) + return dta._ndarray elif lib.is_np_dtype(dtype, "m"): from pandas.core.construction import ensure_wrapped_if_datetimelike diff --git a/pandas/core/tools/datetimes.py b/pandas/core/tools/datetimes.py index b4374f8998bc6..95328d10c9d31 100644 --- a/pandas/core/tools/datetimes.py +++ b/pandas/core/tools/datetimes.py @@ -31,6 +31,7 @@ timezones as libtimezones, ) from pandas._libs.tslibs.conversion import precision_from_unit +from pandas._libs.tslibs.dtypes import abbrev_to_npy_unit from pandas._libs.tslibs.parsing import ( DateParseError, guess_datetime_format, @@ -550,7 +551,7 @@ def _to_datetime_with_unit(arg, unit, name, utc: bool, errors: str) -> Index: tz_parsed = None elif arg.dtype.kind == "f": - mult, _ = precision_from_unit(unit) + mult, _ = precision_from_unit(abbrev_to_npy_unit(unit)) mask = np.isnan(arg) | (arg == iNaT) fvalues = (arg * mult).astype("f8", copy=False) diff --git a/pandas/tests/frame/methods/test_astype.py b/pandas/tests/frame/methods/test_astype.py index a6caafb48ca4d..eac10d307c61c 100644 --- a/pandas/tests/frame/methods/test_astype.py +++ b/pandas/tests/frame/methods/test_astype.py @@ -382,7 +382,12 @@ def test_astype_from_object_to_datetime_unit(self, unit): ["2017-01-01", "2017-01-02", "2017-02-03"], ] df = DataFrame(vals, dtype=object) - with pytest.raises(TypeError, match="Cannot cast"): + msg = ( + rf"Unexpected value for 'dtype': 'datetime64\[{unit}\]'. 
" + r"Must be 'datetime64\[s\]', 'datetime64\[ms\]', 'datetime64\[us\]', " + r"'datetime64\[ns\]' or DatetimeTZDtype" + ) + with pytest.raises(ValueError, match=msg): df.astype(f"M8[{unit}]") @pytest.mark.parametrize("unit", ["Y", "M", "W", "D", "h", "m"]) diff --git a/pandas/tests/indexes/datetimes/test_constructors.py b/pandas/tests/indexes/datetimes/test_constructors.py index 08ec11c87b623..ef86e7800dbb7 100644 --- a/pandas/tests/indexes/datetimes/test_constructors.py +++ b/pandas/tests/indexes/datetimes/test_constructors.py @@ -1013,6 +1013,40 @@ def test_dti_convert_datetime_list(self, tzstr): dr2 = DatetimeIndex(list(dr), name="foo", freq="D") tm.assert_index_equal(dr, dr2) + def test_dti_constructor_with_non_nano_dtype(self): + # GH#55756 + ts = Timestamp("2999-01-01") + dtype = "M8[us]" + # NB: the 2500 is interpreted as nanoseconds and rounded *down* + # to 2 microseconds + vals = [ts, "2999-01-02 03:04:05.678910", 2500] + result = DatetimeIndex(vals, dtype=dtype) + exp_arr = np.array([ts.asm8, vals[1], 2], dtype=dtype) + expected = DatetimeIndex(exp_arr, dtype=dtype) + tm.assert_index_equal(result, expected) + + result2 = DatetimeIndex(np.array(vals, dtype=object), dtype=dtype) + tm.assert_index_equal(result2, expected) + + def test_dti_constructor_with_non_nano_now_today(self): + # GH#55756 + now = Timestamp.now() + today = Timestamp.today() + result = DatetimeIndex(["now", "today"], dtype="M8[s]") + assert result.dtype == "M8[s]" + + # result may not exactly match [now, today] so we'll test it up to a tolerance. + # (it *may* match exactly due to rounding) + tolerance = pd.Timedelta(microseconds=1) + + diff0 = result[0] - now.as_unit("s") + assert diff0 >= pd.Timedelta(0) + assert diff0 < tolerance + + diff1 = result[1] - today.as_unit("s") + assert diff1 >= pd.Timedelta(0) + assert diff1 < tolerance + class TestTimeSeries: def test_dti_constructor_preserve_dti_freq(self): diff --git a/pandas/tests/series/methods/test_astype.py b/pandas/tests/series/methods/test_astype.py index faa3978038dd5..edd3062f7d472 100644 --- a/pandas/tests/series/methods/test_astype.py +++ b/pandas/tests/series/methods/test_astype.py @@ -107,6 +107,20 @@ def test_astype_dict_like(self, dtype_class): class TestAstype: + def test_astype_object_to_dt64_non_nano(self): + # GH#55756 + ts = Timestamp("2999-01-01") + dtype = "M8[us]" + # NB: the 2500 is interpreted as nanoseconds and rounded *down* + # to 2 microseconds + vals = [ts, "2999-01-02 03:04:05.678910", 2500] + ser = Series(vals, dtype=object) + result = ser.astype(dtype) + + exp_arr = np.array([ts.asm8, vals[1], 2], dtype=dtype) + expected = Series(exp_arr, dtype=dtype) + tm.assert_series_equal(result, expected) + def test_astype_mixed_object_to_dt64tz(self): # pre-2.0 this raised ValueError bc of tz mismatch # xref GH#32581