RDBC-767 TimeSeriesOperationsTest::canDeleteLargeRange
poissoncorp committed Dec 15, 2023
1 parent 6a4a2bb commit de73964
Showing 1 changed file with 98 additions and 0 deletions.
@@ -1,5 +1,6 @@
from datetime import datetime, timedelta

from ravendb import SessionOptions
from ravendb.documents.operations.time_series import (
    GetTimeSeriesOperation,
    TimeSeriesOperation,
@@ -9,6 +10,7 @@
    TimeSeriesDetails,
)
from ravendb.documents.session.time_series import TimeSeriesRange
from ravendb.tests.jvm_migrated_tests.client_tests.time_series_tests.test_time_series_raw_query import RawQueryResult
from ravendb.tests.test_base import TestBase, User


@@ -464,3 +466,99 @@ def test_get_time_series_should_throw_on_missing_name(self):
            self.assertIn(ex.args[0], "Timeseries cannot be None or empty")

        self.assertTrue(failed)

    def test_can_delete_large_range(self):
        document_id = "foo/bar"
        base_line = datetime(2023, 8, 20, 0, 0) - timedelta(seconds=1)

        with self.store.open_session() as session:
            session.store(User(), document_id)
            tsf = session.time_series_for(document_id, "BloodPressure")

            for j in range(1, 10000):
                offset = j * 10
                time = base_line + timedelta(seconds=offset)

                tsf.append(time, [j], "watches/apple")

            session.save_changes()

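        # RQL time-series function: bucket BloodPressure into 1-hour groups
        # over the $start..$end window and aggregate each bucket.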
        raw_query = (
            "declare timeseries blood_pressure(doc)\n"
            " {\n"
            " from doc.BloodPressure between $start and $end\n"
            " group by 1h\n"
            " select min(), max(), avg(), first(), last()\n"
            " }\n"
            " from Users as p\n"
            " select blood_pressure(p) as blood_pressure"
        )

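        # 9,999 samples spaced 10 s apart; the 24-hour window $start..$end
        # covers offsets 10 s..86,400 s, i.e. 8,640 samples (360 per hour).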
with self.store.open_session() as session:
query = (
session.advanced.raw_query(raw_query, RawQueryResult)
.add_parameter("start", base_line)
.add_parameter("end", base_line + timedelta(days=1))
)

result = list(query)

self.assertEqual(1, len(result))

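            # Hourly bucket i holds values j = i * 360 + 1 .. (i + 1) * 360,
            # so its average is i * 360 + 180.5.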
            agg = result[0]

            blood_pressure = agg.blood_pressure
            count = sum(map(lambda x: x.count[0], blood_pressure.results))
            self.assertEqual(8640, count)
            self.assertEqual(blood_pressure.count, count)
            self.assertEqual(24, len(blood_pressure.results))

            for index in range(len(blood_pressure.results)):
                item = blood_pressure.results[index]
                self.assertEqual(360, item.count[0])
                self.assertEqual(index * 360 + 180 + 0.5, item.average[0])
                self.assertEqual((index + 1) * 360, item.max[0])
                self.assertEqual(index * 360 + 1, item.min[0])
                self.assertEqual(index * 360 + 1, item.first[0])
                self.assertEqual((index + 1) * 360, item.last[0])

        with self.store.open_session() as session:
            tsf = session.time_series_for(document_id, "BloodPressure")
            tsf.delete(base_line + timedelta(seconds=3600), base_line + timedelta(seconds=3600 * 10))  # remove 9 hours
            session.save_changes()

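        # Deleting offsets 3,600 s..36,000 s drops 3,241 samples (j = 360..3600),
        # leaving 5,399 in the window: hour 0 keeps j = 1..359, hours 1..9 are
        # emptied, and hours 10..23 are untouched (15 non-empty buckets).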
        session_options = SessionOptions(no_caching=True)
        with self.store.open_session(session_options=session_options) as session:
            query = (
                session.advanced.raw_query(raw_query, RawQueryResult)
                .add_parameter("start", base_line)
                .add_parameter("end", base_line + timedelta(days=1))
            )
            result = list(query)
            agg = result[0]
            blood_pressure = agg.blood_pressure
            count = sum(map(lambda x: x.count[0], blood_pressure.results))
            self.assertEqual(5399, count)
            self.assertEqual(blood_pressure.count, count)
            self.assertEqual(15, len(blood_pressure.results))

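            # First surviving bucket: j = 1..359 (the value 360 at offset
            # 3,600 s fell inside the deleted range).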
            index = 0

            item = blood_pressure.results[index]
            self.assertEqual(359, item.count[0])
            self.assertEqual(180, item.average[0])
            self.assertEqual(359, item.max[0])
            self.assertEqual(1, item.min[0])
            self.assertEqual(1, item.first[0])
            self.assertEqual(359, item.last[0])

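            # Hours 1..9 were removed entirely, so result index i now maps to
            # wall-clock hour i + 9.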
            for index in range(1, len(blood_pressure.results)):
                item = blood_pressure.results[index]
                real_index = index + 9

                self.assertEqual(360, item.count[0])
                self.assertEqual(real_index * 360 + 180 + 0.5, item.average[0])
                self.assertEqual((real_index + 1) * 360, item.max[0])
                self.assertEqual(real_index * 360 + 1, item.min[0])
                self.assertEqual(real_index * 360 + 1, item.first[0])
                self.assertEqual((real_index + 1) * 360, item.last[0])
