@@ -65,36 +65,36 @@
 _ALL_CALENDARS = sorted(_NON_STANDARD_CALENDARS_SET.union(_STANDARD_CALENDARS))
 _NON_STANDARD_CALENDARS = sorted(_NON_STANDARD_CALENDARS_SET)
 _CF_DATETIME_NUM_DATES_UNITS = [
-    (np.arange(10), "days since 2000-01-01"),
-    (np.arange(10).astype("float64"), "days since 2000-01-01"),
-    (np.arange(10).astype("float32"), "days since 2000-01-01"),
-    (np.arange(10).reshape(2, 5), "days since 2000-01-01"),
-    (12300 + np.arange(5), "hours since 1680-01-01 00:00:00"),
+    (np.arange(10), "days since 2000-01-01", "s"),
+    (np.arange(10).astype("float64"), "days since 2000-01-01", "s"),
+    (np.arange(10).astype("float32"), "days since 2000-01-01", "s"),
+    (np.arange(10).reshape(2, 5), "days since 2000-01-01", "s"),
+    (12300 + np.arange(5), "hours since 1680-01-01 00:00:00", "s"),
     # here we add a couple minor formatting errors to test
     # the robustness of the parsing algorithm.
-    (12300 + np.arange(5), "hour since 1680-01-01  00:00:00"),
-    (12300 + np.arange(5), "Hour  since 1680-01-01 00:00:00"),
-    (12300 + np.arange(5), " Hour  since  1680-01-01 00:00:00 "),
-    (10, "days since 2000-01-01"),
-    ([10], "daYs  since 2000-01-01"),
-    ([[10]], "days since 2000-01-01"),
-    ([10, 10], "days since 2000-01-01"),
-    (np.array(10), "days since 2000-01-01"),
-    (0, "days since 1000-01-01"),
-    ([0], "days since 1000-01-01"),
-    ([[0]], "days since 1000-01-01"),
-    (np.arange(2), "days since 1000-01-01"),
-    (np.arange(0, 100000, 20000), "days since 1900-01-01"),
-    (np.arange(0, 100000, 20000), "days since 1-01-01"),
-    (17093352.0, "hours since 1-1-1 00:00:0.0"),
-    ([0.5, 1.5], "hours since 1900-01-01T00:00:00"),
-    (0, "milliseconds since 2000-01-01T00:00:00"),
-    (0, "microseconds since 2000-01-01T00:00:00"),
-    (np.int32(788961600), "seconds since 1981-01-01"),  # GH2002
-    (12300 + np.arange(5), "hour since 1680-01-01 00:00:00.500000"),
-    (164375, "days since 1850-01-01 00:00:00"),
-    (164374.5, "days since 1850-01-01 00:00:00"),
-    ([164374.5, 168360.5], "days since 1850-01-01 00:00:00"),
+    (12300 + np.arange(5), "hour since 1680-01-01  00:00:00", "s"),
+    (12300 + np.arange(5), "Hour  since 1680-01-01 00:00:00", "s"),
+    (12300 + np.arange(5), " Hour  since  1680-01-01 00:00:00 ", "s"),
+    (10, "days since 2000-01-01", "s"),
+    ([10], "daYs  since 2000-01-01", "s"),
+    ([[10]], "days since 2000-01-01", "s"),
+    ([10, 10], "days since 2000-01-01", "s"),
+    (np.array(10), "days since 2000-01-01", "s"),
+    (0, "days since 1000-01-01", "s"),
+    ([0], "days since 1000-01-01", "s"),
+    ([[0]], "days since 1000-01-01", "s"),
+    (np.arange(2), "days since 1000-01-01", "s"),
+    (np.arange(0, 100000, 20000), "days since 1900-01-01", "s"),
+    (np.arange(0, 100000, 20000), "days since 1-01-01", "s"),
+    (17093352.0, "hours since 1-1-1 00:00:0.0", "s"),
+    ([0.5, 1.5], "hours since 1900-01-01T00:00:00", "s"),
+    (0, "milliseconds since 2000-01-01T00:00:00", "s"),
+    (0, "microseconds since 2000-01-01T00:00:00", "s"),
+    (np.int32(788961600), "seconds since 1981-01-01", "s"),  # GH2002
+    (12300 + np.arange(5), "hour since 1680-01-01 00:00:00.500000", "us"),
+    (164375, "days since 1850-01-01 00:00:00", "s"),
+    (164374.5, "days since 1850-01-01 00:00:00", "s"),
+    ([164374.5, 168360.5], "days since 1850-01-01 00:00:00", "s"),
 ]
 _CF_DATETIME_TESTS = [
     num_dates_units + (calendar,)
@@ -122,9 +122,15 @@ def _all_cftime_date_types():
 @requires_cftime
 @pytest.mark.filterwarnings("ignore:Ambiguous reference date string")
 @pytest.mark.filterwarnings("ignore:Times can't be serialized faithfully")
-@pytest.mark.parametrize(["num_dates", "units", "calendar"], _CF_DATETIME_TESTS)
+@pytest.mark.parametrize(
+    ["num_dates", "units", "minimum_resolution", "calendar"], _CF_DATETIME_TESTS
+)
 def test_cf_datetime(
-    num_dates, units, calendar, time_unit: PDDatetimeUnitOptions
+    num_dates,
+    units: str,
+    minimum_resolution: PDDatetimeUnitOptions,
+    calendar: str,
+    time_unit: PDDatetimeUnitOptions,
 ) -> None:
     import cftime
 
@@ -137,25 +143,23 @@ def test_cf_datetime(
137143        actual  =  decode_cf_datetime (num_dates , units , calendar , time_unit = time_unit )
138144
139145    if  actual .dtype .kind  !=  "O" :
140-         expected  =  cftime_to_nptime (expected , time_unit = time_unit )
141- 
142-     abs_diff  =  np .asarray (abs (actual  -  expected )).ravel ()
143-     abs_diff  =  pd .to_timedelta (abs_diff .tolist ()).to_numpy ()
146+         if  np .timedelta64 (1 , time_unit ) >  np .timedelta64 (1 , minimum_resolution ):
147+             expected_unit  =  minimum_resolution 
148+         else :
149+             expected_unit  =  time_unit 
150+         expected  =  cftime_to_nptime (expected , time_unit = expected_unit )
144151
145-     # once we no longer support versions of netCDF4 older than 1.1.5, 
146-     # we could do this check with near microsecond accuracy: 
147-     # https://github.com/Unidata/netcdf4-python/issues/355 
148-     assert  (abs_diff  <=  np .timedelta64 (1 , "s" )).all ()
152+     assert_array_equal (actual , expected )
149153    encoded1 , _ , _  =  encode_cf_datetime (actual , units , calendar )
150154
151-     assert_duckarray_allclose (num_dates , encoded1 )
155+     assert_array_equal (num_dates , encoded1 )
152156
153157    if  hasattr (num_dates , "ndim" ) and  num_dates .ndim  ==  1  and  "1000"  not  in   units :
154158        # verify that wrapping with a pandas.Index works 
155159        # note that it *does not* currently work to put 
156160        # non-datetime64 compatible dates into a pandas.Index 
157161        encoded2 , _ , _  =  encode_cf_datetime (pd .Index (actual ), units , calendar )
158-         assert_duckarray_allclose (num_dates , encoded2 )
162+         assert_array_equal (num_dates , encoded2 )
159163
160164
161165@requires_cftime  
@@ -206,11 +210,7 @@ def test_decode_cf_datetime_non_iso_strings() -> None:
     ]
     for num_dates, units in cases:
         actual = decode_cf_datetime(num_dates, units)
-        abs_diff = abs(actual - expected.values)
-        # once we no longer support versions of netCDF4 older than 1.1.5,
-        # we could do this check with near microsecond accuracy:
-        # https://github.com/Unidata/netcdf4-python/issues/355
-        assert (abs_diff <= np.timedelta64(1, "s")).all()
+        assert_array_equal(actual, expected)
 
 
 @requires_cftime
@@ -220,7 +220,7 @@ def test_decode_standard_calendar_inside_timestamp_range(
 ) -> None:
     import cftime
 
-    units = "days since 0001-01-01"
+    units = "hours since 0001-01-01"
     times = pd.date_range(
         "2001-04-01-00", end="2001-04-30-23", unit=time_unit, freq="h"
     )
@@ -233,11 +233,7 @@ def test_decode_standard_calendar_inside_timestamp_range(
     # representable with nanosecond resolution.
     actual = decode_cf_datetime(time, units, calendar=calendar, time_unit=time_unit)
     assert actual.dtype == np.dtype(f"=M8[{time_unit}]")
-    abs_diff = abs(actual - expected)
-    # once we no longer support versions of netCDF4 older than 1.1.5,
-    # we could do this check with near microsecond accuracy:
-    # https://github.com/Unidata/netcdf4-python/issues/355
-    assert (abs_diff <= np.timedelta64(1, "s")).all()
+    assert_array_equal(actual, expected)
 
 
 @requires_cftime
@@ -256,11 +252,7 @@ def test_decode_non_standard_calendar_inside_timestamp_range(calendar) -> None:
 
     actual = decode_cf_datetime(non_standard_time, units, calendar=calendar)
     assert actual.dtype == expected_dtype
-    abs_diff = abs(actual - expected)
-    # once we no longer support versions of netCDF4 older than 1.1.5,
-    # we could do this check with near microsecond accuracy:
-    # https://github.com/Unidata/netcdf4-python/issues/355
-    assert (abs_diff <= np.timedelta64(1, "s")).all()
+    assert_array_equal(actual, expected)
 
 
 @requires_cftime
@@ -287,11 +279,7 @@ def test_decode_dates_outside_timestamp_range(
         warnings.filterwarnings("ignore", "Unable to decode time axis")
         actual = decode_cf_datetime(time, units, calendar=calendar, time_unit=time_unit)
     assert all(isinstance(value, expected_date_type) for value in actual)
-    abs_diff = abs(actual - expected)
-    # once we no longer support versions of netCDF4 older than 1.1.5,
-    # we could do this check with near microsecond accuracy:
-    # https://github.com/Unidata/netcdf4-python/issues/355
-    assert (abs_diff <= np.timedelta64(1, "us")).all()
+    assert_array_equal(actual, expected)
 
 
 @requires_cftime
@@ -367,14 +355,8 @@ def test_decode_standard_calendar_multidim_time_inside_timestamp_range(
         mdim_time, units, calendar=calendar, time_unit=time_unit
     )
     assert actual.dtype == np.dtype(f"=M8[{time_unit}]")
-
-    abs_diff1 = abs(actual[:, 0] - expected1)
-    abs_diff2 = abs(actual[:, 1] - expected2)
-    # once we no longer support versions of netCDF4 older than 1.1.5,
-    # we could do this check with near microsecond accuracy:
-    # https://github.com/Unidata/netcdf4-python/issues/355
-    assert (abs_diff1 <= np.timedelta64(1, "s")).all()
-    assert (abs_diff2 <= np.timedelta64(1, "s")).all()
+    assert_array_equal(actual[:, 0], expected1)
+    assert_array_equal(actual[:, 1], expected2)
 
 
 @requires_cftime
@@ -409,13 +391,8 @@ def test_decode_nonstandard_calendar_multidim_time_inside_timestamp_range(
     actual = decode_cf_datetime(mdim_time, units, calendar=calendar)
 
     assert actual.dtype == expected_dtype
-    abs_diff1 = abs(actual[:, 0] - expected1)
-    abs_diff2 = abs(actual[:, 1] - expected2)
-    # once we no longer support versions of netCDF4 older than 1.1.5,
-    # we could do this check with near microsecond accuracy:
-    # https://github.com/Unidata/netcdf4-python/issues/355
-    assert (abs_diff1 <= np.timedelta64(1, "s")).all()
-    assert (abs_diff2 <= np.timedelta64(1, "s")).all()
+    assert_array_equal(actual[:, 0], expected1)
+    assert_array_equal(actual[:, 1], expected2)
 
 
 @requires_cftime
@@ -455,14 +432,8 @@ def test_decode_multidim_time_outside_timestamp_range(
         dtype = np.dtype(f"=M8[{time_unit}]")
 
     assert actual.dtype == dtype
-
-    abs_diff1 = abs(actual[:, 0] - expected1)
-    abs_diff2 = abs(actual[:, 1] - expected2)
-    # once we no longer support versions of netCDF4 older than 1.1.5,
-    # we could do this check with near microsecond accuracy:
-    # https://github.com/Unidata/netcdf4-python/issues/355
-    assert (abs_diff1 <= np.timedelta64(1, "s")).all()
-    assert (abs_diff2 <= np.timedelta64(1, "s")).all()
+    assert_array_equal(actual[:, 0], expected1)
+    assert_array_equal(actual[:, 1], expected2)
 
 
 @requires_cftime
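
As I read the hunks above, the new third element in each `_CF_DATETIME_NUM_DATES_UNITS` tuple declares the minimum datetime64 resolution at which that case round-trips exactly, and `test_cf_datetime` casts the expected values to the finer of that resolution and the requested decoding unit. A minimal sketch of that selection rule, with the helper name `expected_unit_for` invented here purely for illustration:

```python
import numpy as np


def expected_unit_for(time_unit: str, minimum_resolution: str) -> str:
    """Pick the unit to cast expected values to (hypothetical helper).

    A one-tick timedelta in a coarser unit compares greater than one tick
    in a finer unit (1 s > 1 us), so this returns the finer of the two.
    """
    if np.timedelta64(1, time_unit) > np.timedelta64(1, minimum_resolution):
        return minimum_resolution
    return time_unit


# "hour since 1680-01-01 00:00:00.500000" carries a half-second offset, so its
# case declares "us"; casting at second precision would truncate that offset.
assert expected_unit_for("s", "us") == "us"
# A case that is exact at second resolution keeps whatever unit was requested.
assert expected_unit_for("ns", "s") == "ns"
```

Because `expected` then matches the resolution the decoder actually produces, the former one-second (and one-microsecond) tolerance checks can be tightened to exact `assert_array_equal` comparisons throughout the diff.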