Skip to content

Commit

Permalink
Merge pull request #1720 from aarchiba/clock-error-reporting
Browse files Browse the repository at this point in the history
Improve clock error reporting
  • Loading branch information
dlakaplan authored Feb 21, 2024
2 parents 5810c83 + 8b7f6c1 commit eb1f48b
Show file tree
Hide file tree
Showing 4 changed files with 23 additions and 7 deletions.
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
rev: v4.5.0
hooks:
- id: check-yaml
- id: check-merge-conflict
- id: check-symlinks
- repo: https://github.com/psf/black
rev: 23.1.0
rev: 24.2.0
hooks:
- id: black
1 change: 1 addition & 0 deletions CHANGELOG-unreleased.md
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ the released changes.
- Moved `get_derived_params` to `timing_model`
- `check_ephemeris_connection` CI test no longer requires access to static NANOGrav site
- `TimingModel.compare()` now calls `change_binary_epoch()`.
- When clock files contain out-of-order entries, the exception now records the first pair of MJDs that are out of order
### Added
- Added numdifftools to setup.cfg to match requirements.txt
- Documentation: Added `convert_parfile` to list of command-line tools in RTD
Expand Down
3 changes: 2 additions & 1 deletion src/pint/observatory/clock_file.py
Original file line number Diff line number Diff line change
Expand Up @@ -103,8 +103,9 @@ def __init__(
raise ValueError(f"MJDs have {len(mjd)} entries but clock has {len(clock)}")
self._time = Time(mjd, format="pulsar_mjd", scale="utc")
if not np.all(np.diff(self._time.mjd) >= 0):
i = np.where(np.diff(self._time.mjd) < 0)[0][0]
raise ValueError(
f"Clock file {self.friendly_name} appears to be out of order"
f"Clock file {self.friendly_name} appears to be out of order: {self._time[i]} > {self._time[i+1]}"
)
self._clock = clock.to(u.us)
if comments is None:
Expand Down
22 changes: 18 additions & 4 deletions tests/test_clock_file.py
Original file line number Diff line number Diff line change
Expand Up @@ -107,7 +107,8 @@ def test_merge_mjds_trims_range():
ca = ClockFile(mjd=a, clock=np.zeros_like(a) * u.s)
cb = ClockFile(mjd=b, clock=np.zeros_like(b) * u.s)

m = ClockFile.merge([ca, cb])
with pytest.warns(UserWarning, match="out of range"):
m = ClockFile.merge([ca, cb])
assert_array_equal(m.time.mjd, np.array([50000, 55000, 60000]))


Expand All @@ -118,7 +119,8 @@ def test_merge_mjds_trims_range_repeat_beginning():
ca = ClockFile(mjd=a, clock=np.zeros_like(a) * u.s)
cb = ClockFile(mjd=b, clock=np.zeros_like(b) * u.s)

m = ClockFile.merge([ca, cb])
with pytest.warns(UserWarning, match="out of range"):
m = ClockFile.merge([ca, cb])
assert_array_equal(m.time.mjd, np.array([50000, 50000, 55000, 60000]))


Expand All @@ -129,7 +131,8 @@ def test_merge_mjds_trims_range_repeat_end():
ca = ClockFile(mjd=a, clock=np.zeros_like(a) * u.s)
cb = ClockFile(mjd=b, clock=np.zeros_like(b) * u.s)

m = ClockFile.merge([ca, cb])
with pytest.warns(UserWarning, match="out of range"):
m = ClockFile.merge([ca, cb])
assert_array_equal(m.time.mjd, np.array([50000, 55000, 60000, 60000]))


Expand All @@ -139,7 +142,8 @@ def test_merge_mjds_trims_range_mixed():

ca = ClockFile(mjd=a, clock=np.zeros_like(a) * u.s)
cb = ClockFile(mjd=b, clock=np.zeros_like(b) * u.s)
m = ClockFile.merge([ca, cb])
with pytest.warns(UserWarning, match="out of range"):
m = ClockFile.merge([ca, cb])
assert_array_equal(m.time.mjd, np.array([50000, 55000, 60000]))


Expand Down Expand Up @@ -469,3 +473,13 @@ def test_out_of_range_allowed():
valid_beyond_ends=True,
)
basic_clock.evaluate(Time(60001, format="mjd"), limits="error")


def test_out_of_order_raises_exception():
    """A ClockFile built from non-monotonic MJDs must raise ValueError.

    The error message should identify the first out-of-order entry
    (here 55000, which precedes the smaller value 54000).
    """
    mjds = np.array([50000, 55000, 54000, 60000])
    corrections = np.array([1.0, 2.0, -1.0, 1.0]) * u.us
    with pytest.raises(ValueError) as exc:
        ClockFile(mjd=mjds, clock=corrections, friendly_name="basic_clock")
    # The offending MJD must be named in the exception text.
    assert "55000" in str(exc.value)

0 comments on commit eb1f48b

Please sign in to comment.