diff --git a/ravendb/__init__.py b/ravendb/__init__.py index 6c81869d..4cb7642c 100644 --- a/ravendb/__init__.py +++ b/ravendb/__init__.py @@ -63,15 +63,15 @@ GetCompareExchangeValuesOperation, DeleteCompareExchangeValueOperation, ) -from ravendb.documents.operations.configuration import ( +from ravendb.documents.operations.configuration.operations import ( GetServerWideClientConfigurationOperation, PutServerWideClientConfigurationOperation, ClientConfiguration, GetClientConfigurationOperation, PutClientConfigurationOperation, - StudioConfiguration, - StudioEnvironment, ) +from ravendb.documents.operations.configuration.definitions import StudioConfiguration, StudioEnvironment + from ravendb.documents.operations.connection_strings import ConnectionString from ravendb.documents.operations.etl.configuration import EtlConfiguration, RavenEtlConfiguration from ravendb.documents.operations.etl.olap import OlapEtlConfiguration diff --git a/ravendb/documents/commands/batches.py b/ravendb/documents/commands/batches.py index 1f3b754d..79d3f8ec 100644 --- a/ravendb/documents/commands/batches.py +++ b/ravendb/documents/commands/batches.py @@ -15,7 +15,6 @@ ) from ravendb.documents.operations.time_series import TimeSeriesOperation from ravendb.documents.session.misc import TransactionMode, ForceRevisionStrategy -from ravendb.documents.time_series import TimeSeriesOperations from ravendb.http.raven_command import RavenCommand from ravendb.http.server_node import ServerNode from ravendb.json.result import BatchCommandResult diff --git a/ravendb/documents/conventions.py b/ravendb/documents/conventions.py index bbd514c1..52955349 100644 --- a/ravendb/documents/conventions.py +++ b/ravendb/documents/conventions.py @@ -12,7 +12,11 @@ from typing import TypeVar from ravendb.json.metadata_as_dictionary import MetadataAsDictionary from ravendb.primitives import constants -from ravendb.documents.operations.configuration import ClientConfiguration, LoadBalanceBehavior, 
ReadBalanceBehavior +from ravendb.documents.operations.configuration.definitions import ( + ClientConfiguration, + LoadBalanceBehavior, + ReadBalanceBehavior, +) from ravendb.documents.indexes.definitions import SortOptions from ravendb.tools.utils import Utils diff --git a/ravendb/documents/indexes/definitions.py b/ravendb/documents/indexes/definitions.py index 465bd867..6c32d704 100644 --- a/ravendb/documents/indexes/definitions.py +++ b/ravendb/documents/indexes/definitions.py @@ -625,7 +625,7 @@ def detect_static_index_source_type(map_str: str) -> IndexSourceType: tokens = [token for token in map_lower.split(" ", 4) if token] - if len(tokens) > 4 and tokens[2].lower() == "in": + if len(tokens) >= 4 and tokens[2].lower() == "in": if tokens[3].startswith("timeseries"): return IndexSourceType.TIME_SERIES if tokens[3].startswith("counters"): diff --git a/ravendb/documents/operations/configuration/__init__.py b/ravendb/documents/operations/configuration/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ravendb/documents/operations/configuration/definitions.py b/ravendb/documents/operations/configuration/definitions.py new file mode 100644 index 00000000..c0b54d4c --- /dev/null +++ b/ravendb/documents/operations/configuration/definitions.py @@ -0,0 +1,69 @@ +from __future__ import annotations +from enum import Enum +from typing import Optional, Union + +from ravendb.http.misc import ReadBalanceBehavior, LoadBalanceBehavior + + +class StudioEnvironment(Enum): + NONE = "NONE" + DEVELOPMENT = "DEVELOPMENT" + TESTING = "TESTING" + PRODUCTION = "PRODUCTION" + + +class StudioConfiguration: + def __init__(self, disabled: Optional[bool] = None, environment: Optional[StudioEnvironment] = None): + self.disabled = disabled + self.environment = environment + + +class ClientConfiguration: + def __init__(self): + self.__identity_parts_separator: Union[None, str] = None + self.etag: int = 0 + self.disabled: bool = False + 
self.max_number_of_requests_per_session: Optional[int] = None + self.read_balance_behavior: Optional[ReadBalanceBehavior] = None + self.load_balance_behavior: Optional[LoadBalanceBehavior] = None + self.load_balancer_context_seed: Optional[int] = None + + @property + def identity_parts_separator(self) -> str: + return self.__identity_parts_separator + + @identity_parts_separator.setter + def identity_parts_separator(self, value: str): + if value is not None and "|" == value: + raise ValueError("Cannot set identity parts separator to '|'") + self.__identity_parts_separator = value + + def to_json(self) -> dict: + return { + "IdentityPartsSeparator": self.__identity_parts_separator, + "Etag": self.etag, + "Disabled": self.disabled, + "MaxNumberOfRequestsPerSession": self.max_number_of_requests_per_session, + "ReadBalanceBehavior": self.read_balance_behavior.value + if self.read_balance_behavior + else ReadBalanceBehavior.NONE, + "LoadBalanceBehavior": self.load_balance_behavior.value + if self.load_balance_behavior + else LoadBalanceBehavior.NONE, + "LoadBalancerContextSeed": self.load_balancer_context_seed, + } + + @classmethod + def from_json(cls, json_dict: dict) -> Optional[ClientConfiguration]: + if json_dict is None: + return None + config = cls() + config.__identity_parts_separator = json_dict["IdentityPartsSeparator"] + config.etag = json_dict["Etag"] + config.disabled = json_dict["Disabled"] + config.max_number_of_requests_per_session = json_dict["MaxNumberOfRequestsPerSession"] + config.read_balance_behavior = ReadBalanceBehavior(json_dict["ReadBalanceBehavior"]) + config.load_balance_behavior = LoadBalanceBehavior(json_dict["LoadBalanceBehavior"]) + config.load_balancer_context_seed = json_dict["LoadBalancerContextSeed"] + + return config diff --git a/ravendb/documents/operations/configuration.py b/ravendb/documents/operations/configuration/operations.py similarity index 65% rename from ravendb/documents/operations/configuration.py rename to 
ravendb/documents/operations/configuration/operations.py index a0f11efe..efdd429a 100644 --- a/ravendb/documents/operations/configuration.py +++ b/ravendb/documents/operations/configuration/operations.py @@ -1,85 +1,21 @@ from __future__ import annotations import json -from enum import Enum -from typing import Union, Optional, TYPE_CHECKING +from typing import TYPE_CHECKING import requests +from ravendb.documents.operations.configuration.definitions import ClientConfiguration from ravendb.documents.operations.definitions import VoidMaintenanceOperation, MaintenanceOperation from ravendb.http.raven_command import RavenCommand, VoidRavenCommand from ravendb.http.server_node import ServerNode from ravendb.http.topology import RaftCommand -from ravendb.serverwide.operations.common import ServerOperation, VoidServerOperation from ravendb.util.util import RaftIdGenerator -from ravendb.http.misc import ReadBalanceBehavior, LoadBalanceBehavior if TYPE_CHECKING: from ravendb.documents.conventions import DocumentConventions - -class StudioEnvironment(Enum): - NONE = "NONE" - DEVELOPMENT = "DEVELOPMENT" - TESTING = "TESTING" - PRODUCTION = "PRODUCTION" - - -class StudioConfiguration: - def __init__(self, disabled: Optional[bool] = None, environment: Optional[StudioEnvironment] = None): - self.disabled = disabled - self.environment = environment - - -class ClientConfiguration: - def __init__(self): - self.__identity_parts_separator: Union[None, str] = None - self.etag: int = 0 - self.disabled: bool = False - self.max_number_of_requests_per_session: Union[None, int] = None - self.read_balance_behavior: Union[None, "ReadBalanceBehavior"] = None - self.load_balance_behavior: Union[None, "LoadBalanceBehavior"] = None - self.load_balancer_context_seed: Union[None, int] = None - - @property - def identity_parts_separator(self) -> str: - return self.__identity_parts_separator - - @identity_parts_separator.setter - def identity_parts_separator(self, value: str): - if value is not None 
and "|" == value: - raise ValueError("Cannot set identity parts separator to '|'") - self.__identity_parts_separator = value - - def to_json(self) -> dict: - return { - "IdentityPartsSeparator": self.__identity_parts_separator, - "Etag": self.etag, - "Disabled": self.disabled, - "MaxNumberOfRequestsPerSession": self.max_number_of_requests_per_session, - "ReadBalanceBehavior": self.read_balance_behavior.value - if self.read_balance_behavior - else ReadBalanceBehavior.NONE, - "LoadBalanceBehavior": self.load_balance_behavior.value - if self.load_balance_behavior - else LoadBalanceBehavior.NONE, - "LoadBalancerContextSeed": self.load_balancer_context_seed, - } - - @classmethod - def from_json(cls, json_dict: dict) -> Optional[ClientConfiguration]: - if json_dict is None: - return None - config = cls() - config.__identity_parts_separator = json_dict["IdentityPartsSeparator"] - config.etag = json_dict["Etag"] - config.disabled = json_dict["Disabled"] - config.max_number_of_requests_per_session = json_dict["MaxNumberOfRequestsPerSession"] - config.read_balance_behavior = ReadBalanceBehavior(json_dict["ReadBalanceBehavior"]) - config.load_balance_behavior = LoadBalanceBehavior(json_dict["LoadBalanceBehavior"]) - config.load_balancer_context_seed = json_dict["LoadBalancerContextSeed"] - - return config +from ravendb.serverwide.operations.common import ServerOperation, VoidServerOperation class GetClientConfigurationOperation(MaintenanceOperation): diff --git a/ravendb/documents/operations/time_series.py b/ravendb/documents/operations/time_series.py index efc383d2..5795a6b7 100644 --- a/ravendb/documents/operations/time_series.py +++ b/ravendb/documents/operations/time_series.py @@ -2,7 +2,7 @@ import datetime import json -from typing import Dict, Optional, List, Any, TYPE_CHECKING, Callable +from typing import Dict, Optional, List, Any, TYPE_CHECKING, Callable, Set import requests from ravendb.primitives.constants import int_max @@ -14,7 +14,7 @@ from 
ravendb.http.raven_command import RavenCommand, VoidRavenCommand from ravendb.documents.operations.definitions import MaintenanceOperation, IOperation, VoidOperation from ravendb.primitives.time_series import TimeValue -from ravendb.tools.utils import Utils +from ravendb.tools.utils import Utils, CaseInsensitiveDict from ravendb.util.util import RaftIdGenerator from ravendb.documents.conventions import DocumentConventions @@ -43,6 +43,14 @@ def __init__( self.name = name + @classmethod + def from_json(cls, json_dict: Dict[str, Any]) -> TimeSeriesPolicy: + return cls( + json_dict["Name"], + TimeValue.from_json(json_dict["AggregationTime"]), + TimeValue.from_json(json_dict["RetentionTime"]), + ) + def get_time_series_name(self, raw_name: str) -> str: return raw_name + TimeSeriesConfiguration.TIME_SERIES_ROLLUP_SEPARATOR + self.name @@ -57,6 +65,10 @@ def to_json(self) -> Dict[str, Any]: class RawTimeSeriesPolicy(TimeSeriesPolicy): POLICY_STRING = "rawpolicy" # must be lower case + @classmethod + def from_json(cls, json_dict: Dict[str, Any]) -> RawTimeSeriesPolicy: + return cls(TimeValue.from_json(json_dict["RetentionTime"])) + @classmethod def DEFAULT_POLICY(cls) -> RawTimeSeriesPolicy: return cls(TimeValue.MAX_VALUE()) @@ -78,6 +90,14 @@ def __init__( self.policies = policies self.raw_policy = raw_policy + @classmethod + def from_json(cls, json_dict: Dict[str, Any]) -> TimeSeriesCollectionConfiguration: + return cls( + json_dict["Disabled"], + [TimeSeriesPolicy.from_json(policy_json) for policy_json in json_dict["Policies"]], + RawTimeSeriesPolicy.from_json(json_dict["RawPolicy"]), + ) + def to_json(self) -> Dict[str, Any]: return { "Disabled": self.disabled, @@ -92,15 +112,18 @@ class TimeSeriesConfiguration: @classmethod def from_json( cls, - collections: Dict[str, TimeSeriesCollectionConfiguration], - policy_check_frequency: datetime.timedelta, - named_values: Dict[str, Dict[str, List[str]]], + json_dict: Dict[str, Any] = None, ) -> TimeSeriesConfiguration: 
configuration = cls() - configuration.collections = collections - configuration.policy_check_frequency = policy_check_frequency - configuration.named_values = named_values - + configuration.collections = { + key: TimeSeriesCollectionConfiguration.from_json(value) for key, value in json_dict["Collections"].items() + } + configuration.policy_check_frequency = ( + Utils.string_to_timedelta(json_dict["PolicyCheckFrequency"]) + if "PolicyCheckFrequency" in json_dict and json_dict["PolicyCheckFrequency"] + else None + ) + configuration.named_values = json_dict["NamedValues"] configuration._internal_post_json_deserialization() return configuration @@ -116,6 +139,48 @@ def to_json(self) -> Dict[str, Any]: "NamedValues": self.named_values, } + def get_names(self, collection: str, time_series: str) -> Optional[List[str]]: + if self.named_values is None: + return None + + ts_holder = self.named_values.get(collection, None) + if ts_holder is None: + return None + + names = ts_holder.get(time_series, None) + if names is None: + return None + + return names + + def _internal_post_json_deserialization(self) -> None: + self._populate_named_values() + self._populate_policies() + + def _populate_policies(self) -> None: + if self.collections is None: + return + + dic = CaseInsensitiveDict() + for key, value in self.collections.items(): + dic[key] = value + + self.collections = dic + + def _populate_named_values(self) -> None: + if self.named_values is None: + return + + # ensure ignore case + dic = CaseInsensitiveDict() + + for key, value in self.named_values.items(): + value_map = CaseInsensitiveDict() + value_map.update(value) + dic[key] = value_map + + self.named_values = dic + class ConfigureTimeSeriesOperationResult: def __init__(self, raft_command_index: int): @@ -292,6 +357,12 @@ def __init__(self, timestamp: datetime.datetime, values: List[float], tag: Optio self.values = values self.tag = tag + def __eq__(self, other): + return isinstance(other, 
TimeSeriesOperation.AppendOperation) and other.timestamp == self.timestamp + + def __hash__(self): + return hash(self.timestamp) + def to_json(self) -> Dict[str, Any]: json_dict = { "Timestamp": Utils.datetime_to_string(self.timestamp), @@ -317,7 +388,7 @@ def to_json(self) -> Dict[str, Any]: def __init__(self, name: Optional[str] = None): self.name = name - self._appends: List[TimeSeriesOperation.AppendOperation] = [] + self._appends: Set[TimeSeriesOperation.AppendOperation] = set() self._deletes: List[TimeSeriesOperation.DeleteOperation] = [] def to_json(self) -> Dict[str, Any]: @@ -330,14 +401,13 @@ def to_json(self) -> Dict[str, Any]: def append(self, append_operation: AppendOperation) -> None: if self._appends is None: - self._appends = [] # todo: perf - filtered = self._appends + self._appends = set() - # if len(filtered) != 0: - # # element with given timestamp already exists - remove and retry add operation - # self._appends.remove(filtered.pop()) + if append_operation in self._appends: + # __eq__ override lets us discard old one by passing new one due to the same timestamps + self._appends.discard(append_operation) - self._appends.append(append_operation) + self._appends.add(append_operation) def delete(self, delete_operation: DeleteOperation) -> None: if self._deletes is None: @@ -367,7 +437,7 @@ def from_json(cls, json_dict: Dict[str, Any]) -> TimeSeriesRangeResult: Utils.string_to_datetime(json_dict["From"]), Utils.string_to_datetime(json_dict["To"]), [TimeSeriesEntry.from_json(entry_json) for entry_json in json_dict["Entries"]], - json_dict["TotalResults"] if "TotalResults" in json_dict else 0, + json_dict["TotalResults"] if "TotalResults" in json_dict else None, json_dict.get("Includes", None), ) @@ -554,11 +624,11 @@ def create_request(self, node: ServerNode) -> requests.Request: if self._start > 0: path_builder.append("&start=") - path_builder.append(self._start) + path_builder.append(str(self._start)) if self._page_size < int_max: 
path_builder.append("&pageSize=") - path_builder.append(self._page_size) + path_builder.append(str(self._page_size)) if not self._ranges: raise ValueError("Ranges cannot be None or empty") diff --git a/ravendb/documents/queries/time_series.py b/ravendb/documents/queries/time_series.py index a5e48866..b2467dc7 100644 --- a/ravendb/documents/queries/time_series.py +++ b/ravendb/documents/queries/time_series.py @@ -25,7 +25,8 @@ def __init__(self, count: Optional[int] = None, results: Optional[List[TimeSerie @classmethod def from_json(cls, json_dict: Dict[str, Any]) -> TimeSeriesQueryResult: - json_dict = json_dict["__timeSeriesQueryFunction"] + if "Count" not in json_dict: + json_dict = json_dict["__timeSeriesQueryFunction"] return cls( json_dict["Count"], [TimeSeriesEntry.from_json(time_series_entry_json) for time_series_entry_json in json_dict["Results"]], @@ -182,8 +183,9 @@ def as_typed_result( return result @classmethod - def from_json(cls, json_dict: Dict[str, Any]) -> TimeSeriesQueryResult: - json_dict = json_dict["__timeSeriesQueryFunction"] + def from_json(cls, json_dict: Dict[str, Any]) -> TimeSeriesAggregationResult: + if "Count" not in json_dict: + json_dict = json_dict["__timeSeriesQueryFunction"] return cls( json_dict["Count"], [TimeSeriesRangeAggregation.from_json(result) for result in json_dict["Results"]] ) diff --git a/ravendb/documents/session/document_session.py b/ravendb/documents/session/document_session.py index 7e52aac6..db80dd5a 100644 --- a/ravendb/documents/session/document_session.py +++ b/ravendb/documents/session/document_session.py @@ -1913,9 +1913,13 @@ def _serve_from_cache( from_range_index = -1 ranges_to_get_from_server: Optional[List[TimeSeriesRange]] = None to_range_index = -1 + cache_includes_whole_range = False - for i in range(len(ranges)): + while True: to_range_index += 1 + if to_range_index >= len(ranges): + break + if TSRangeHelper.left(ranges[to_range_index].from_date) <= TSRangeHelper.left(from_date): if ( 
TSRangeHelper.right(ranges[to_range_index].to_date) >= TSRangeHelper.right(to_date) @@ -1941,7 +1945,7 @@ def _serve_from_cache( from_date if ( to_range_index == 0 - or TSRangeHelper.left(from_date) < TSRangeHelper.right(ranges[to_range_index - 1].to_date) + or TSRangeHelper.right(ranges[to_range_index - 1].to_date) < TSRangeHelper.left(from_date) ) else ranges[to_range_index - 1].to_date ) @@ -1954,9 +1958,10 @@ def _serve_from_cache( ranges_to_get_from_server.append(TimeSeriesRange(self.name, from_to_use, to_to_use)) if TSRangeHelper.right(ranges[to_range_index].to_date) >= TSRangeHelper.right(to_date): + cache_includes_whole_range = True break - if to_range_index == len(ranges) - 1: + if not cache_includes_whole_range: # requested_range [from, to] ends after all ranges in cache # add the missing part between the last range end and 'to' # to the list of ranges we need to get from server @@ -1986,16 +1991,16 @@ def _serve_from_cache( ) if not self.session.no_tracking: - from_date = min( - [ - from_date - for from_date in [x.from_date for x in details.values.get(self.name)] - if from_date is not None - ] - ) - to_date = max( - [to_date for to_date in [x.to_date for x in details.values.get(self.name)] if to_date is not None] - ) + from_dates = [ + from_date for from_date in [x.from_date for x in details.values.get(self.name)] if from_date is not None + ] + from_date = None if not any(from_dates) else min(from_dates) + + to_dates = [ + to_date for to_date in [x.to_date for x in details.values.get(self.name)] if to_date is not None + ] + to_date = None if not any(to_dates) else max(to_dates) + InMemoryDocumentSessionOperations.add_to_cache( self.name, from_date, to_date, from_range_index, to_range_index, ranges, cache, merged_values ) @@ -2200,9 +2205,9 @@ def get( start: int = 0, page_size: int = int_max, ) -> Optional[List[TimeSeriesEntry]]: - return self.get_include(from_date, to_date, None, start, page_size) + return self.get_with_include(from_date, to_date, None, 
start, page_size) - def get_include( + def get_with_include( self, from_date: Optional[datetime] = None, to_date: Optional[datetime] = None, @@ -2276,6 +2281,9 @@ def append(self, timestamp: datetime, entry: _T_TS_Values_Bindable, tag: Optiona def append_entry(self, entry: TypedTimeSeriesEntry[_T_TS_Values_Bindable]) -> None: self.append(entry.timestamp, entry.value, entry.tag) + def append_single(self, timestamp: datetime, value: _T_TS_Values_Bindable, tag: Optional[str] = None) -> None: + self.append(timestamp, value, tag) + class SessionDocumentRollupTypedTimeSeries(SessionTimeSeriesBase, Generic[_T_TS_Values_Bindable]): def __init__( diff --git a/ravendb/documents/session/document_session_operations/in_memory_document_session_operations.py b/ravendb/documents/session/document_session_operations/in_memory_document_session_operations.py index fd96674d..1ed59127 100644 --- a/ravendb/documents/session/document_session_operations/in_memory_document_session_operations.py +++ b/ravendb/documents/session/document_session_operations/in_memory_document_session_operations.py @@ -635,7 +635,7 @@ def counters_by_doc_id(self): return self._counters_by_doc_id @property - def time_series_by_doc_id(self): + def time_series_by_doc_id(self) -> Dict[str, Dict[str, List[TimeSeriesRangeResult]]]: return self._time_series_by_doc_id @property diff --git a/ravendb/documents/session/loaders/include.py b/ravendb/documents/session/loaders/include.py index 3d59493f..70ace43b 100644 --- a/ravendb/documents/session/loaders/include.py +++ b/ravendb/documents/session/loaders/include.py @@ -1,10 +1,9 @@ from __future__ import annotations import datetime -from typing import Set, Tuple, Dict, Union, Optional, List +from typing import Set, Tuple, Dict, Union, Optional, List, TYPE_CHECKING from ravendb.primitives import constants -from ravendb.documents.conventions import DocumentConventions from ravendb.primitives.time_series import TimeValue from ravendb.tools.utils import CaseInsensitiveDict, 
CaseInsensitiveSet @@ -15,6 +14,8 @@ TimeSeriesCountRange, ) +from ravendb.documents.conventions import DocumentConventions + class IncludeBuilderBase: def __init__(self, conventions: DocumentConventions): @@ -140,7 +141,6 @@ def _assert_valid(self, alias: str, name: str) -> None: "after using 'includeAllTimeSeries'." ) - # todo: more time series methods def _include_time_series_from_to( self, alias: str, name: str, from_date: datetime.datetime, to_date: datetime.datetime ): @@ -212,7 +212,7 @@ def _include_array_of_time_series_by_range_type_and_time( @staticmethod def _assert_valid_type_and_count(type_: TimeSeriesRangeType, count: int) -> None: if type_ == TimeSeriesRangeType.NONE: - raise ValueError("Time range type cannot be set to NONE when time is specified.") + raise ValueError("Time range type cannot be set to NONE when count is specified.") elif type_ == TimeSeriesRangeType.LAST: if count <= 0: raise ValueError("Count have to be positive") diff --git a/ravendb/documents/session/query.py b/ravendb/documents/session/query.py index c58b2151..94fdae4d 100644 --- a/ravendb/documents/session/query.py +++ b/ravendb/documents/session/query.py @@ -2260,6 +2260,9 @@ def take(self, count: int) -> RawDocumentQuery[_T]: self._take(count) return self + def first(self) -> _T: + return list(self.take(1))[0] + def wait_for_non_stale_results(self, wait_timeout: Optional[datetime.timedelta] = None): self._wait_for_non_stale_results(wait_timeout) return self diff --git a/ravendb/documents/session/time_series.py b/ravendb/documents/session/time_series.py index bf7ad496..050bf6bf 100644 --- a/ravendb/documents/session/time_series.py +++ b/ravendb/documents/session/time_series.py @@ -292,6 +292,22 @@ def get_fields_mapping(ts_bindable_object_type: Type[_T_TSBindable]) -> Optional ) ts_bindable_type_mapping = ts_bindable_class_instance.get_time_series_mapping() + mappings = ts_bindable_type_mapping + mapping_keys = list(ts_bindable_type_mapping.keys()) + if mapping_keys[0] != 0 or 
mapping_keys[-1] != len(mapping_keys) - 1: + raise RuntimeError( + f"The mapping of '{ts_bindable_object_type.__name__ }' " + f"must contain consecutive values starting from 0." + ) + + for key, (name, tag) in mappings.items(): + class_instance_value = ts_bindable_class_instance.__dict__[name] + if not isinstance(class_instance_value, float) and class_instance_value is not None: + raise RuntimeError( + f"Cannot create a mapping for '{ts_bindable_object_type.__name__}' class, " + f"because field '{name}' is not a float" + ) + new_cache_entry = {} for idx, field_name_and_ts_value_name in ts_bindable_type_mapping.items(): field_name = field_name_and_ts_value_name[0] diff --git a/ravendb/http/request_executor.py b/ravendb/http/request_executor.py index 6cb15389..0b4cad3e 100644 --- a/ravendb/http/request_executor.py +++ b/ravendb/http/request_executor.py @@ -21,7 +21,7 @@ AuthorizationException, RequestedNodeUnavailableException, ) -from ravendb.documents.operations.configuration import GetClientConfigurationOperation +from ravendb.documents.operations.configuration.operations import GetClientConfigurationOperation from ravendb.exceptions.exception_dispatcher import ExceptionDispatcher from ravendb.exceptions.raven_exceptions import ClientVersionMismatchException diff --git a/ravendb/primitives/constants.py b/ravendb/primitives/constants.py index ea8fca1a..0a2320b2 100644 --- a/ravendb/primitives/constants.py +++ b/ravendb/primitives/constants.py @@ -1,7 +1,7 @@ import sys -int_min = 0x80000000 int_max = 0x7FFFFFFF +int_min = -int_max - 1 min_normal = sys.float_info.min json_serialize_method_name = "to_json" nan_value = float("nan") diff --git a/ravendb/primitives/time_series.py b/ravendb/primitives/time_series.py index 0460d31e..92a49074 100644 --- a/ravendb/primitives/time_series.py +++ b/ravendb/primitives/time_series.py @@ -22,8 +22,12 @@ def __init__(self, value: int, unit: TimeValueUnit): self.value = value self.unit = unit + @classmethod + def from_json(cls, 
json_dict: Dict[str, Any]) -> TimeValue: + return cls(json_dict["Value"], TimeValueUnit(json_dict["Unit"])) + def to_json(self) -> Dict[str, Any]: - return {"Value": self.value, "Unit": self.unit} + return {"Value": self.value, "Unit": self.unit.value} def __str__(self): if self.value == int_max: @@ -194,7 +198,7 @@ def _trim_compare_result(result: int) -> int: return result def __eq__(self, o: object): - if self == o: + if id(self) == id(o): return True if o is None or self.__class__ != o.__class__: return False diff --git a/ravendb/serverwide/database_record.py b/ravendb/serverwide/database_record.py index 51730378..54358f6d 100644 --- a/ravendb/serverwide/database_record.py +++ b/ravendb/serverwide/database_record.py @@ -28,6 +28,8 @@ from ravendb.documents.queries.sorting import SorterDefinition +from ravendb.documents.operations.time_series import TimeSeriesConfiguration + if TYPE_CHECKING: from ravendb.serverwide import ( ConflictSolver, @@ -36,7 +38,6 @@ DatabaseTopology, ) from ravendb.documents.operations.configuration import ClientConfiguration, StudioConfiguration - from ravendb.documents.operations.time_series import TimeSeriesConfiguration class DatabaseRecord: @@ -45,19 +46,19 @@ def __init__(self, database_name: Optional[str] = None): self.disabled: bool = False self.encrypted: bool = False self.settings: Dict[str, str] = {} - self.conflict_solver_config: Union[None, ConflictSolver] = None - self.documents_compression: Union[None, DocumentsCompressionConfiguration] = None - self.etag_for_backup: Union[None, int] = None - self.deletion_in_progress: Union[None, Dict[str, DeletionInProgressStatus]] = None - self.rolling_indexes: Union[None, Dict[str, RollingIndex]] = None - self.topology: Union[None, DatabaseTopology] = None + self.conflict_solver_config: Optional[ConflictSolver] = None + self.documents_compression: Optional[DocumentsCompressionConfiguration] = None + self.etag_for_backup: Optional[int] = None + self.deletion_in_progress: 
Optional[Dict[str, DeletionInProgressStatus]] = None + self.rolling_indexes: Optional[Dict[str, RollingIndex]] = None + self.topology: Optional[DatabaseTopology] = None self.sorters: Dict[str, SorterDefinition] = {} self.analyzers: Dict[str, AnalyzerDefinition] = {} - self.indexes: Union[None, Dict[str, IndexDefinition]] = {} - self.auto_indexes: Union[None, Dict[str, AutoIndexDefinition]] = None - self.revisions: Union[None, RevisionsConfiguration] = None - self.time_series: Union[None, "TimeSeriesConfiguration"] = None - self.expiration: Union[None, ExpirationConfiguration] = None + self.indexes: Optional[Dict[str, IndexDefinition]] = {} + self.auto_indexes: Optional[Dict[str, AutoIndexDefinition]] = None + self.revisions: Optional[RevisionsConfiguration] = None + self.time_series: Optional["TimeSeriesConfiguration"] = None + self.expiration: Optional[ExpirationConfiguration] = None self.periodic_backups: List[PeriodicBackupConfiguration] = [] self.external_replications: List[ExternalReplication] = [] self.sink_pull_replications: List[PullReplicationAsSink] = [] @@ -68,15 +69,15 @@ def __init__(self, database_name: Optional[str] = None): self.raven_etls: List[RavenEtlConfiguration] = [] self.sql_etls: List[SqlEtlConfiguration] = [] self.olap_etls: List[OlapEtlConfiguration] = [] - self.client: Union[None, ClientConfiguration] = None - self.studio: Union[None, StudioConfiguration] = None + self.client: Optional[ClientConfiguration] = None + self.studio: Optional[StudioConfiguration] = None self.truncated_cluster_transaction_commands_count: int = 0 - self.database_state: Union[None, DatabaseRecord.DatabaseStateStatus] = None - self.lock_mode: Union[None, DatabaseRecord.DatabaseLockMode] = None - self.indexes_history_story: Union[None, List[DatabaseRecord.IndexHistoryEntry]] = None - self.revisions_for_conflicts: Union[None, RevisionsCollectionConfiguration] = None - self.refresh: Union[None, RefreshConfiguration] = None - self.unused_database_ids: Union[None, 
Set[str]] = None + self.database_state: Optional[DatabaseRecord.DatabaseStateStatus] = None + self.lock_mode: Optional[DatabaseRecord.DatabaseLockMode] = None + self.indexes_history_story: Optional[List[DatabaseRecord.IndexHistoryEntry]] = None + self.revisions_for_conflicts: Optional[RevisionsCollectionConfiguration] = None + self.refresh: Optional[RefreshConfiguration] = None + self.unused_database_ids: Optional[Set[str]] = None def to_json(self): return { @@ -142,7 +143,8 @@ def from_json(cls, json_dict: dict) -> DatabaseRecord: } record.settings = json_dict.get("Settings", None) record.revisions = json_dict.get("Revisions", None) - record.time_series = json_dict.get("TimeSeries", None) + if json_dict.get("TimeSeries", None): + record.time_series = TimeSeriesConfiguration.from_json(json_dict["TimeSeries"]) record.revisions_for_conflicts = json_dict.get("RevisionsForConflicts", None) record.expiration = json_dict.get("Expiration", None) record.refresh = json_dict.get("Refresh", None) diff --git a/ravendb/tests/jvm_migrated_tests/client_tests/documents_tests/operations_tests/configuration/test_client_configuration.py b/ravendb/tests/jvm_migrated_tests/client_tests/documents_tests/operations_tests/configuration/test_client_configuration.py index 76b82667..e931d22f 100644 --- a/ravendb/tests/jvm_migrated_tests/client_tests/documents_tests/operations_tests/configuration/test_client_configuration.py +++ b/ravendb/tests/jvm_migrated_tests/client_tests/documents_tests/operations_tests/configuration/test_client_configuration.py @@ -1,5 +1,5 @@ -from ravendb.documents.operations.configuration import ( - ClientConfiguration, +from ravendb.documents.operations.configuration.definitions import ClientConfiguration +from ravendb.documents.operations.configuration.operations import ( PutServerWideClientConfigurationOperation, GetServerWideClientConfigurationOperation, GetClientConfigurationOperation, diff --git 
a/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_configuration.py b/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_configuration.py new file mode 100644 index 00000000..b4827592 --- /dev/null +++ b/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_configuration.py @@ -0,0 +1,290 @@ +import json +import unittest + +from ravendb import GetDatabaseRecordOperation +from ravendb.documents.operations.time_series import ( + ConfigureTimeSeriesOperation, + TimeSeriesConfiguration, + TimeSeriesCollectionConfiguration, + TimeSeriesPolicy, + RawTimeSeriesPolicy, + ConfigureTimeSeriesPolicyOperation, + ConfigureRawTimeSeriesPolicyOperation, + ConfigureTimeSeriesValueNamesOperation, +) +from ravendb.infrastructure.entities import User +from ravendb.primitives.time_series import TimeValue, TimeValueUnit +from ravendb.tests.test_base import TestBase + + +class TestTimeSeriesConfiguration(TestBase): + def setUp(self): + super().setUp() + + def test_serialization(self): + self.assertEqual("{'Value': 7200, 'Unit': 'Second'}", str(TimeValue.of_hours(2).to_json())) + + def test_deserialization(self): + time_value = TimeValue.from_json(json.loads('{"Value":7200,"Unit":"Second"}')) + self.assertEqual(TimeValueUnit.SECOND, time_value.unit) + self.assertEqual(7200, time_value.value) + + time_value = TimeValue.from_json(json.loads('{"Value":2,"Unit":"Month"}')) + self.assertEqual(TimeValueUnit.MONTH, time_value.unit) + self.assertEqual(2, time_value.value) + + time_value = TimeValue.from_json(json.loads('{"Value":0,"Unit":"None"}')) + self.assertEqual(TimeValueUnit.NONE, time_value.unit) + self.assertEqual(0, time_value.value) + + @unittest.skip("Disable on pull request") + def test_can_configure_time_series(self): + config = TimeSeriesConfiguration() + self.store.maintenance.send(ConfigureTimeSeriesOperation(config)) + + config.collections = {} + 
self.store.maintenance.send(ConfigureTimeSeriesOperation(config)) + + config.collections["Users"] = TimeSeriesCollectionConfiguration() + self.store.maintenance.send(ConfigureTimeSeriesOperation(config)) + + users = config.collections.get("Users") + users.policies = [ + TimeSeriesPolicy("ByHourFor12Hours", TimeValue.of_hours(1), TimeValue.of_hours(48)), + TimeSeriesPolicy("ByMinuteFor3Hours", TimeValue.of_minutes(1), TimeValue.of_minutes(180)), + TimeSeriesPolicy("BySecondFor1Minute", TimeValue.of_seconds(1), TimeValue.of_seconds(60)), + TimeSeriesPolicy("ByMonthFor1Year", TimeValue.of_months(1), TimeValue.of_years(1)), + TimeSeriesPolicy("ByYearFor3Years", TimeValue.of_years(1), TimeValue.of_years(3)), + TimeSeriesPolicy("ByDayFor1Month", TimeValue.of_days(1), TimeValue.of_months(1)), + ] + + self.store.maintenance.send(ConfigureTimeSeriesOperation(config)) + + users.raw_policy = RawTimeSeriesPolicy(TimeValue.of_hours(96)) + self.store.maintenance.send(ConfigureTimeSeriesOperation(config)) + + updated: TimeSeriesConfiguration = self.store.maintenance.server.send( + GetDatabaseRecordOperation(self.store.database) + ).time_series + + collection = updated.collections.get("Users") + policies = collection.policies + self.assertEqual(6, len(policies)) + + self.assertEqual(TimeValue.of_seconds(60), policies[0].retention_time) + self.assertEqual(TimeValue.of_seconds(1), policies[0].aggregation_time) + + self.assertEqual(TimeValue.of_minutes(180), policies[1].retention_time) + self.assertEqual(TimeValue.of_minutes(1), policies[1].aggregation_time) + + self.assertEqual(TimeValue.of_hours(48), policies[2].retention_time) + self.assertEqual(TimeValue.of_hours(1), policies[2].aggregation_time) + + self.assertEqual(TimeValue.of_months(1), policies[3].retention_time) + self.assertEqual(TimeValue.of_days(1), policies[3].aggregation_time) + + self.assertEqual(TimeValue.of_years(1), policies[4].retention_time) + self.assertEqual(TimeValue.of_months(1), policies[4].aggregation_time) 
+ + self.assertEqual(TimeValue.of_years(3), policies[5].retention_time) + self.assertEqual(TimeValue.of_years(1), policies[5].aggregation_time) + + @unittest.skip("Disable on pull request") + def test_can_configure_time_series_2(self): + collection_name = "Users" + + p1 = TimeSeriesPolicy("BySecondFor1Minute", TimeValue.of_seconds(1), TimeValue.of_seconds(60)) + p2 = TimeSeriesPolicy("ByMinuteFor3Hours", TimeValue.of_minutes(1), TimeValue.of_minutes(180)) + p3 = TimeSeriesPolicy("ByHourFor12Hours", TimeValue.of_hours(1), TimeValue.of_hours(48)) + p4 = TimeSeriesPolicy("ByDayFor1Month", TimeValue.of_days(1), TimeValue.of_months(1)) + p5 = TimeSeriesPolicy("ByMonthFor1Year", TimeValue.of_months(1), TimeValue.of_years(1)) + p6 = TimeSeriesPolicy("ByYearFor3Years", TimeValue.of_years(1), TimeValue.of_years(3)) + + policies = [p1, p2, p3, p4, p5, p6] + + for policy in policies: + self.store.maintenance.send(ConfigureTimeSeriesPolicyOperation(collection_name, policy)) + + self.store.maintenance.send( + ConfigureRawTimeSeriesPolicyOperation(collection_name, RawTimeSeriesPolicy(TimeValue.of_hours(96))) + ) + + parameters = ConfigureTimeSeriesValueNamesOperation.Parameters(None, None, None, None) + parameters.collection = collection_name + parameters.time_series = "HeartRate" + parameters.value_names = ["HeartRate"] + parameters.update = True + + name_config = ConfigureTimeSeriesValueNamesOperation(parameters) + self.store.maintenance.send(name_config) + + updated: TimeSeriesConfiguration = self.store.maintenance.server.send( + GetDatabaseRecordOperation(self.store.database) + ).time_series + + collection = updated.collections.get(collection_name) + policies = collection.policies + + self.assertEqual(6, len(policies)) + + self.assertEqual(TimeValue.of_seconds(60), policies[0].retention_time) + self.assertEqual(TimeValue.of_seconds(1), policies[0].aggregation_time) + + self.assertEqual(TimeValue.of_minutes(180), policies[1].retention_time) + 
self.assertEqual(TimeValue.of_minutes(1), policies[1].aggregation_time) + + self.assertEqual(TimeValue.of_hours(48), policies[2].retention_time) + self.assertEqual(TimeValue.of_hours(1), policies[2].aggregation_time) + + self.assertEqual(TimeValue.of_months(1), policies[3].retention_time) + self.assertEqual(TimeValue.of_days(1), policies[3].aggregation_time) + + self.assertEqual(TimeValue.of_years(1), policies[4].retention_time) + self.assertEqual(TimeValue.of_months(1), policies[4].aggregation_time) + + self.assertEqual(TimeValue.of_years(3), policies[5].retention_time) + self.assertEqual(TimeValue.of_years(1), policies[5].aggregation_time) + + def test_not_valid_configure_should_throw(self): + config = TimeSeriesConfiguration() + collections_config = {} + config.collections = collections_config + + time_series_collection_configuration = TimeSeriesCollectionConfiguration() + collections_config["Users"] = time_series_collection_configuration + + time_series_collection_configuration.raw_policy = RawTimeSeriesPolicy(TimeValue.of_months(1)) + time_series_collection_configuration.policies = [ + TimeSeriesPolicy("By30DaysFor5Years", TimeValue.of_days(30), TimeValue.of_years(5)) + ] + + self.assertRaisesWithMessage( + self.store.maintenance.send, + Exception, + "Unable to compare 1 month with 30 days, since a month might have different number of days.", + ConfigureTimeSeriesOperation(config), + ) + + config2 = TimeSeriesConfiguration() + collections_config = {} + config2.collections = collections_config + + time_series_collection_configuration = TimeSeriesCollectionConfiguration() + collections_config["Users"] = time_series_collection_configuration + + time_series_collection_configuration.raw_policy = RawTimeSeriesPolicy(TimeValue.of_months(12)) + time_series_collection_configuration.policies = [ + TimeSeriesPolicy("By365DaysFor5Years", TimeValue.of_seconds(365 * 24 * 3600), TimeValue.of_years(5)) + ] + + self.assertRaisesWithMessage( + self.store.maintenance.send, + 
Exception, + "Unable to compare 1 year with 365 days, since a month might have different number of days.", + ConfigureTimeSeriesOperation(config2), + ) + + config3 = TimeSeriesConfiguration() + collections_config = {} + config3.collections = collections_config + + time_series_collection_configuration = TimeSeriesCollectionConfiguration() + collections_config["Users"] = time_series_collection_configuration + + time_series_collection_configuration.raw_policy = RawTimeSeriesPolicy(TimeValue.of_months(1)) + time_series_collection_configuration.policies = [ + TimeSeriesPolicy("By27DaysFor1Year", TimeValue.of_days(27), TimeValue.of_years(1)), + TimeSeriesPolicy("By364daysFor5Years", TimeValue.of_days(364), TimeValue.of_years(5)), + ] + + self.assertRaisesWithMessage( + self.store.maintenance.send, + Exception, + "The aggregation time of the policy 'By364daysFor5Years' (364 days) " + "must be divided by the aggregation time of 'By27DaysFor1Year' (27 days) without a remainder.", + ConfigureTimeSeriesOperation(config3), + ) + + @unittest.skip("Disable on pull request") + def test_configure_time_series_3(self): + self.store.time_series.set_policy( + User, "By15SecondsFor1Minute", TimeValue.of_seconds(15), TimeValue.of_seconds(60) + ) + self.store.time_series.set_policy(User, "ByMinuteFor3Hours", TimeValue.of_minutes(1), TimeValue.of_minutes(180)) + self.store.time_series.set_policy(User, "ByHourFor12Hours", TimeValue.of_hours(1), TimeValue.of_hours(48)) + self.store.time_series.set_policy(User, "ByDayFor1Month", TimeValue.of_days(1), TimeValue.of_months(1)) + self.store.time_series.set_policy(User, "ByMonthFor1Year", TimeValue.of_months(1), TimeValue.of_years(1)) + self.store.time_series.set_policy(User, "ByYearFor3Years", TimeValue.of_years(1), TimeValue.of_years(3)) + + updated: TimeSeriesConfiguration = self.store.maintenance.server.send( + GetDatabaseRecordOperation(self.store.database) + ).time_series + collection = updated.collections.get("Users") + policies = 
collection.policies + + self.assertEqual(6, len(policies)) + + self.assertEqual(6, len(policies)) + + self.assertEqual(TimeValue.of_seconds(60), policies[0].retention_time) + self.assertEqual(TimeValue.of_seconds(15), policies[0].aggregation_time) + + self.assertEqual(TimeValue.of_minutes(180), policies[1].retention_time) + self.assertEqual(TimeValue.of_minutes(1), policies[1].aggregation_time) + + self.assertEqual(TimeValue.of_hours(48), policies[2].retention_time) + self.assertEqual(TimeValue.of_hours(1), policies[2].aggregation_time) + + self.assertEqual(TimeValue.of_months(1), policies[3].retention_time) + self.assertEqual(TimeValue.of_days(1), policies[3].aggregation_time) + + self.assertEqual(TimeValue.of_years(1), policies[4].retention_time) + self.assertEqual(TimeValue.of_months(1), policies[4].aggregation_time) + + self.assertEqual(TimeValue.of_years(3), policies[5].retention_time) + self.assertEqual(TimeValue.of_years(1), policies[5].aggregation_time) + + self.assertRaisesWithMessage( + self.store.time_series.remove_policy, + Exception, + "The policy 'By15SecondsFor1Minute' has a retention time of '60 seconds' " + "but should be aggregated by policy 'ByHourFor12Hours' with the aggregation time frame of 60 minutes", + User, + "ByMinuteFor3Hours", + ) + + self.assertRaisesWithMessage( + self.store.time_series.set_raw_policy, + Exception, + "The policy 'rawpolicy' has a retention time of '10 seconds' but should be aggregated by policy " + "'By15SecondsFor1Minute' with the aggregation time frame of 15 seconds", + User, + TimeValue.of_seconds(10), + ) + + self.store.time_series.set_raw_policy(User, TimeValue.of_minutes(120)) + self.store.time_series.set_policy( + User, "By15SecondsFor1Minute", TimeValue.of_seconds(30), TimeValue.of_seconds(120) + ) + + updated: TimeSeriesConfiguration = self.store.maintenance.server.send( + GetDatabaseRecordOperation(self.store.database) + ).time_series + collection = updated.collections.get("Users") + policies = 
collection.policies + + self.assertEqual(6, len(policies)) + self.assertEqual(TimeValue.of_seconds(120), policies[0].retention_time) + self.assertEqual(TimeValue.of_seconds(30), policies[0].aggregation_time) + + self.store.time_series.remove_policy(User, "By15SecondsFor1Minute") + + updated: TimeSeriesConfiguration = self.store.maintenance.server.send( + GetDatabaseRecordOperation(self.store.database) + ).time_series + collection = updated.collections.get("Users") + policies = collection.policies + + self.assertEqual(5, len(policies)) + + self.store.time_series.remove_policy(User, RawTimeSeriesPolicy.POLICY_STRING) diff --git a/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_includes.py b/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_includes.py index a35c2065..629f9bf2 100644 --- a/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_includes.py +++ b/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_includes.py @@ -2,6 +2,7 @@ from ravendb.documents.session.time_series import TimeSeriesRangeType from ravendb.infrastructure.orders import Company, Order +from ravendb.primitives.constants import int_max from ravendb.primitives.time_series import TimeValue from ravendb.tests.test_base import TestBase, User @@ -872,3 +873,314 @@ def test_multi_load_with_include_time_series(self): self.assertEqual(91, len(vals)) self.assertEqual(base_line, vals[0].timestamp) self.assertEqual(base_line + timedelta(minutes=30), vals[90].timestamp) + + def test_should_throw_on_including_time_series_with_none_range(self): + with self.store.open_session() as session: + self.assertRaisesWithMessage( + session.load, + ValueError, + "Time range type cannot be set to NONE when time is specified.", + "orders/1-A", + Order, + lambda i: i.include_documents("company").include_all_time_series_by_time( + TimeSeriesRangeType.NONE, TimeValue.of_minutes(-30) + ), + ) + with 
self.store.open_session() as session: + self.assertRaisesWithMessage( + session.load, + ValueError, + "Time range type cannot be set to NONE when time is specified.", + "orders/1-A", + Order, + lambda i: i.include_documents("company").include_all_time_series_by_time( + TimeSeriesRangeType.NONE, TimeValue.ZERO() + ), + ) + + with self.store.open_session() as session: + self.assertRaisesWithMessage( + session.load, + ValueError, + "Time range type cannot be set to NONE when count is specified.", + "orders/1-A", + Order, + lambda i: i.include_documents("company").include_all_time_series_by_count( + TimeSeriesRangeType.NONE, 1024 + ), + ) + + with self.store.open_session() as session: + self.assertRaisesWithMessage( + session.load, + ValueError, + "Time range type cannot be set to NONE when time is specified.", + "orders/1-A", + Order, + lambda i: i.include_documents("company").include_all_time_series_by_time( + TimeSeriesRangeType.NONE, TimeValue.of_minutes(30) + ), + ) + + self.assertEqual(0, session.advanced.number_of_requests) + + def test_should_throw_on_including_time_series_with_last_range_zero_or_negative_time(self): + with self.store.open_session() as session: + self.assertRaisesWithMessage( + session.load, + ValueError, + "Time range type cannot be set to LAST when time is negative or zero.", + "orders/1-A", + Order, + lambda i: i.include_documents("company").include_all_time_series_by_time( + TimeSeriesRangeType.LAST, TimeValue.MIN_VALUE() + ), + ) + + self.assertRaisesWithMessage( + session.load, + ValueError, + "Time range type cannot be set to LAST when time is negative or zero.", + "orders/1-A", + Order, + lambda i: i.include_documents("company").include_all_time_series_by_time( + TimeSeriesRangeType.LAST, TimeValue.ZERO() + ), + ) + + self.assertEqual(0, session.advanced.number_of_requests) + + def test_should_throw_on_include_all_time_series_after_including_time_series(self): + with self.store.open_session() as session: + self.assertRaisesWithMessage( 
+ session.load, + RuntimeError, + "IncludeBuilder : Cannot use 'includeAllTimeSeries' after using 'includeTimeSeries' or 'includeAllTimeSeries'.", + "orders/1-A", + Order, + lambda i: i.include_documents("company") + .include_all_time_series_by_count(TimeSeriesRangeType.LAST, 11) + .include_all_time_series_by_time(TimeSeriesRangeType.LAST, TimeValue.of_minutes(10)), + ) + + self.assertRaisesWithMessage( + session.load, + RuntimeError, + "IncludeBuilder : Cannot use 'includeAllTimeSeries' after using 'includeTimeSeries' or 'includeAllTimeSeries'.", + "orders/1-A", + Order, + lambda i: i.include_documents("company") + .include_all_time_series_by_count(TimeSeriesRangeType.LAST, int_max) + .include_all_time_series_by_time(TimeSeriesRangeType.LAST, TimeValue.of_minutes(11)), + ) + + self.assertRaisesWithMessage( + session.load, + RuntimeError, + "IncludeBuilder : Cannot use 'includeAllTimeSeries' after using 'includeTimeSeries' or 'includeAllTimeSeries'.", + "orders/1-A", + Order, + lambda i: i.include_documents("company") + .include_time_series_by_range_type_and_count("heartrate", TimeSeriesRangeType.LAST, int_max) + .include_all_time_series_by_time(TimeSeriesRangeType.LAST, TimeValue.of_minutes(10)), + ) + + self.assertRaisesWithMessage( + session.load, + RuntimeError, + "IncludeBuilder : Cannot use 'includeAllTimeSeries' after using 'includeTimeSeries' or 'includeAllTimeSeries'.", + "orders/1-A", + Order, + lambda i: i.include_documents("company") + .include_time_series_by_range_type_and_count("heartrate", TimeSeriesRangeType.LAST, int_max) + .include_all_time_series_by_count(TimeSeriesRangeType.LAST, 11), + ) + + self.assertRaisesWithMessage( + session.load, + RuntimeError, + "IncludeBuilder : Cannot use 'includeAllTimeSeries' after using 'includeTimeSeries' or 'includeAllTimeSeries'.", + "orders/1-A", + Order, + lambda i: i.include_documents("company") + .include_time_series_by_range_type_and_count("heartrate", TimeSeriesRangeType.LAST, 11) + 
.include_all_time_series_by_count(TimeSeriesRangeType.LAST, TimeValue.of_minutes(10)), + ) + + self.assertRaisesWithMessage( + session.load, + RuntimeError, + "IncludeBuilder : Cannot use 'includeAllTimeSeries' after using 'includeTimeSeries' or 'includeAllTimeSeries'.", + "orders/1-A", + Order, + lambda i: i.include_documents("company") + .include_time_series_by_range_type_and_count("heartrate", TimeSeriesRangeType.LAST, 11) + .include_all_time_series_by_count(TimeSeriesRangeType.LAST, TimeValue.of_minutes(11)), + ) + + self.assertEqual(0, session.advanced.number_of_requests) + + def test_should_throw_on_including_time_series_after_include_all_time_series(self): + with self.store.open_session() as session: + self.assertRaisesWithMessage( + session.load, + RuntimeError, + "IncludeBuilder : Cannot use 'includeAllTimeSeries' " + "after using 'includeTimeSeries' or 'includeAllTimeSeries'.", + "orders/1-A", + Order, + lambda i: i.include_documents("company") + .include_all_time_series_by_count(TimeSeriesRangeType.LAST, 11) + .include_all_time_series_by_time(TimeSeriesRangeType.LAST, TimeValue.of_minutes(10)), + ) + + self.assertRaisesWithMessage( + session.load, + RuntimeError, + "IncludeBuilder : Cannot use 'includeAllTimeSeries' " + "after using 'includeTimeSeries' or 'includeAllTimeSeries'.", + "orders/1-A", + Order, + lambda i: i.include_documents("company") + .include_all_time_series_by_count(TimeSeriesRangeType.LAST, int_max) + .include_all_time_series_by_count(TimeSeriesRangeType.LAST, 11), + ) + self.assertRaisesWithMessage( + session.load, + RuntimeError, + "IncludeBuilder: Cannot use 'includeTimeSeries' or 'includeAllTimeSeries' " + "after using 'includeAllTimeSeries'.", + "orders/1-A", + Order, + lambda i: i.include_documents("company") + .include_all_time_series_by_time(TimeSeriesRangeType.LAST, TimeValue.of_minutes(10)) + .include_time_series_by_range_type_and_count("heartrate", TimeSeriesRangeType.LAST, int_max), + ) + self.assertRaisesWithMessage( + 
session.load, + RuntimeError, + "IncludeBuilder: Cannot use 'includeTimeSeries' or 'includeAllTimeSeries' " + "after using 'includeAllTimeSeries'.", + "orders/1-A", + Order, + lambda i: i.include_documents("company") + .include_all_time_series_by_count(TimeSeriesRangeType.LAST, 11) + .include_time_series_by_range_type_and_time( + "heartrate", TimeSeriesRangeType.LAST, TimeValue.MAX_VALUE() + ), + ) + self.assertRaisesWithMessage( + session.load, + RuntimeError, + "IncludeBuilder: Cannot use 'includeTimeSeries' or 'includeAllTimeSeries' " + "after using 'includeAllTimeSeries'.", + "orders/1-A", + Order, + lambda i: i.include_documents("company") + .include_all_time_series_by_time(TimeSeriesRangeType.LAST, TimeValue.of_minutes(10)) + .include_time_series_by_range_type_and_count("heartrate", TimeSeriesRangeType.LAST, 11), + ) + self.assertRaisesWithMessage( + session.load, + RuntimeError, + "IncludeBuilder: Cannot use 'includeTimeSeries' or 'includeAllTimeSeries' " + "after using 'includeAllTimeSeries'.", + "orders/1-A", + Order, + lambda i: i.include_documents("company") + .include_all_time_series_by_count(TimeSeriesRangeType.LAST, 11) + .include_time_series_by_range_type_and_time( + "heartrate", TimeSeriesRangeType.LAST, TimeValue.of_minutes(10) + ), + ) + + self.assertRaisesWithMessage( + session.load, + RuntimeError, + "IncludeBuilder : Cannot use 'includeAllTimeSeries' " + "after using 'includeTimeSeries' or 'includeAllTimeSeries'.", + "orders/1-A", + Order, + lambda i: i.include_documents("company") + .include_all_time_series_by_count(TimeSeriesRangeType.LAST, 11) + .include_all_time_series_by_time(TimeSeriesRangeType.LAST, TimeValue.of_minutes(10)), + ) + + self.assertRaisesWithMessage( + session.load, + RuntimeError, + "IncludeBuilder : Cannot use 'includeAllTimeSeries' " + "after using 'includeTimeSeries' or 'includeAllTimeSeries'.", + "orders/1-A", + Order, + lambda i: i.include_documents("company") + 
.include_all_time_series_by_time(TimeSeriesRangeType.LAST, TimeValue.MAX_VALUE()) + .include_all_time_series_by_count(TimeSeriesRangeType.LAST, 11), + ) + + self.assertRaisesWithMessage( + session.load, + RuntimeError, + "IncludeBuilder: Cannot use 'includeTimeSeries' or 'includeAllTimeSeries' " + "after using 'includeAllTimeSeries'.", + "orders/1-A", + Order, + lambda i: i.include_documents("company") + .include_all_time_series_by_time(TimeSeriesRangeType.LAST, TimeValue.of_minutes(10)) + .include_time_series_by_range_type_and_time( + "heartrate", TimeSeriesRangeType.LAST, TimeValue.MAX_VALUE() + ), + ) + + self.assertRaisesWithMessage( + session.load, + RuntimeError, + "IncludeBuilder: Cannot use 'includeTimeSeries' or 'includeAllTimeSeries' " + "after using 'includeAllTimeSeries'.", + "orders/1-A", + Order, + lambda i: i.include_documents("company") + .include_all_time_series_by_count(TimeSeriesRangeType.LAST, 11) + .include_time_series_by_range_type_and_time( + "heartrate", TimeSeriesRangeType.LAST, TimeValue.MAX_VALUE() + ), + ) + + self.assertRaisesWithMessage( + session.load, + RuntimeError, + "IncludeBuilder: Cannot use 'includeTimeSeries' or 'includeAllTimeSeries' " + "after using 'includeAllTimeSeries'.", + "orders/1-A", + Order, + lambda i: i.include_documents("company") + .include_all_time_series_by_time(TimeSeriesRangeType.LAST, TimeValue.of_minutes(10)) + .include_time_series_by_range_type_and_count("heartrate", TimeSeriesRangeType.LAST, 11), + ) + + self.assertRaisesWithMessage( + session.load, + RuntimeError, + "IncludeBuilder: Cannot use 'includeTimeSeries' or 'includeAllTimeSeries' " + "after using 'includeAllTimeSeries'.", + "orders/1-A", + Order, + lambda i: i.include_documents("company") + .include_all_time_series_by_count(TimeSeriesRangeType.LAST, 11) + .include_time_series_by_range_type_and_count("heartrate", TimeSeriesRangeType.LAST, 11), + ) + + self.assertEqual(0, session.advanced.number_of_requests) + + def 
test_should_throw_on_including_time_series_with_negative_count(self): + with self.store.open_session() as session: + self.assertRaisesWithMessage( + session.load, + ValueError, + "Count have to be positive", + "orders/1-A", + Order, + lambda i: i.include_documents("company").include_all_time_series_by_count( + TimeSeriesRangeType.LAST, -1024 + ), + ) diff --git a/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_operations.py b/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_operations.py index 131be518..e9b236df 100644 --- a/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_operations.py +++ b/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_operations.py @@ -1,12 +1,18 @@ from datetime import datetime, timedelta +from ravendb import SessionOptions from ravendb.documents.operations.time_series import ( GetTimeSeriesOperation, TimeSeriesOperation, TimeSeriesBatchOperation, GetTimeSeriesStatisticsOperation, + GetMultipleTimeSeriesOperation, + TimeSeriesDetails, ) +from ravendb.documents.session.time_series import TimeSeriesRange +from ravendb.tests.jvm_migrated_tests.client_tests.time_series_tests.test_time_series_raw_query import RawQueryResult from ravendb.tests.test_base import TestBase, User +from ravendb.tools.raven_test_helper import RavenTestHelper class TestTimeSeriesOperations(TestBase): @@ -199,3 +205,538 @@ def test_get_time_series_statistics(self): self.assertEqual(base_line + timedelta(minutes=10 * 10), ts2.start_date) self.assertEqual(base_line + timedelta(minutes=20 * 10), ts2.end_date) + + def test_can_get_multiple_ranges_in_single_request(self): + base_line = datetime(2023, 8, 20, 21, 30) + document_id = "users/ayende" + + with self.store.open_session() as session: + session.store(User(), document_id) + session.save_changes() + + time_series_op = TimeSeriesOperation("Heartrate") + for i in range(361): + 
time_series_op.append( + TimeSeriesOperation.AppendOperation(base_line + timedelta(seconds=i * 10), [59], "watches/fitbit") + ) + + time_series_batch = TimeSeriesBatchOperation(document_id, time_series_op) + self.store.operations.send(time_series_batch) + + time_series_details: TimeSeriesDetails = self.store.operations.send( + GetMultipleTimeSeriesOperation( + document_id, + [ + TimeSeriesRange("Heartrate", base_line + timedelta(minutes=5), base_line + timedelta(minutes=10)), + TimeSeriesRange("Heartrate", base_line + timedelta(minutes=15), base_line + timedelta(minutes=30)), + TimeSeriesRange("Heartrate", base_line + timedelta(minutes=40), base_line + timedelta(minutes=60)), + ], + ) + ) + + self.assertEqual(document_id, time_series_details.key) + self.assertEqual(1, len(time_series_details.values)) + self.assertEqual(3, len(time_series_details.values.get("Heartrate"))) + + range_ = time_series_details.values.get("Heartrate")[0] + + self.assertEqual(base_line + timedelta(minutes=5), range_.from_date) + self.assertEqual(base_line + timedelta(minutes=10), range_.to_date) + + self.assertEqual(31, len(range_.entries)) + self.assertEqual(base_line + timedelta(minutes=5), range_.entries[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=10), range_.entries[30].timestamp) + + range_ = time_series_details.values.get("Heartrate")[1] + + self.assertEqual(base_line + timedelta(minutes=15), range_.from_date) + self.assertEqual(base_line + timedelta(minutes=30), range_.to_date) + + self.assertEqual(91, len(range_.entries)) + self.assertEqual(base_line + timedelta(minutes=15), range_.entries[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=30), range_.entries[90].timestamp) + + range_ = time_series_details.values.get("Heartrate")[2] + + self.assertEqual(base_line + timedelta(minutes=40), range_.from_date) + self.assertEqual(base_line + timedelta(minutes=60), range_.to_date) + + self.assertEqual(121, len(range_.entries)) + self.assertEqual(base_line + 
timedelta(minutes=40), range_.entries[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=60), range_.entries[120].timestamp) + + def test_can_get_multiple_time_series_in_single_request(self): + document_id = "users/ayende" + + with self.store.open_session() as session: + session.store(User(), document_id) + session.save_changes() + + # append + + base_line = datetime(2023, 8, 20, 21, 30) + + time_series_op = TimeSeriesOperation("Heartrate") + + for i in range(11): + time_series_op.append( + TimeSeriesOperation.AppendOperation(base_line + timedelta(minutes=i * 10), [72], "watches/fitbit") + ) + + time_series_batch = TimeSeriesBatchOperation(document_id, time_series_op) + + self.store.operations.send(time_series_batch) + + time_series_op = TimeSeriesOperation("BloodPressure") + + for i in range(11): + time_series_op.append(TimeSeriesOperation.AppendOperation(base_line + timedelta(minutes=i * 10), [80])) + + time_series_batch = TimeSeriesBatchOperation(document_id, time_series_op) + self.store.operations.send(time_series_batch) + + time_series_op = TimeSeriesOperation("Temperature") + + for i in range(11): + time_series_op.append( + TimeSeriesOperation.AppendOperation(base_line + timedelta(minutes=i * 10), [37 + i * 0.15]) + ) + + time_series_batch = TimeSeriesBatchOperation(document_id, time_series_op) + self.store.operations.send(time_series_batch) + + # get ranges from multiple time series in a single request + + time_series_details: TimeSeriesDetails = self.store.operations.send( + GetMultipleTimeSeriesOperation( + document_id, + [ + TimeSeriesRange("Heartrate", base_line, base_line + timedelta(minutes=15)), + TimeSeriesRange("Heartrate", base_line + timedelta(minutes=30), base_line + timedelta(minutes=45)), + TimeSeriesRange("BloodPressure", base_line, base_line + timedelta(minutes=30)), + TimeSeriesRange( + "BloodPressure", base_line + timedelta(minutes=60), base_line + timedelta(minutes=90) + ), + TimeSeriesRange("Temperature", base_line, base_line 
+ timedelta(days=1)), + ], + ) + ) + + self.assertEqual(document_id, time_series_details.key) + self.assertEqual(3, len(time_series_details.values)) + + self.assertEqual(2, len(time_series_details.values["Heartrate"])) + + range_ = time_series_details.values.get("Heartrate")[0] + + self.assertEqual(base_line, range_.from_date) + self.assertEqual(base_line + timedelta(minutes=15), range_.to_date) + + self.assertEqual(2, len(range_.entries)) + + self.assertEqual(base_line, range_.entries[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=10), range_.entries[1].timestamp) + + self.assertIsNone(range_.total_results) + + range_ = time_series_details.values.get("Heartrate")[1] + + self.assertEqual(base_line + timedelta(minutes=30), range_.from_date) + self.assertEqual(base_line + timedelta(minutes=45), range_.to_date) + + self.assertEqual(2, len(range_.entries)) + self.assertEqual(base_line + timedelta(minutes=30), range_.entries[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=40), range_.entries[1].timestamp) + + self.assertIsNone(range_.total_results) + + range_ = time_series_details.values.get("BloodPressure")[0] + self.assertEqual(base_line, range_.from_date) + self.assertEqual(base_line + timedelta(minutes=30), range_.to_date) + + self.assertEqual(4, len(range_.entries)) + + self.assertEqual(base_line, range_.entries[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=30), range_.entries[3].timestamp) + + self.assertIsNone(range_.total_results) + + range_ = time_series_details.values.get("BloodPressure")[1] + + self.assertEqual(base_line + timedelta(minutes=60), range_.from_date) + self.assertEqual(base_line + timedelta(minutes=90), range_.to_date) + + self.assertEqual(4, len(range_.entries)) + + self.assertEqual(base_line + timedelta(minutes=60), range_.entries[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=90), range_.entries[3].timestamp) + + self.assertIsNone(range_.total_results) + + self.assertEqual(1, 
len(time_series_details.values.get("Temperature"))) + + range_ = time_series_details.values.get("Temperature")[0] + + self.assertEqual(base_line, range_.from_date) + self.assertEqual(base_line + timedelta(days=1), range_.to_date) + + self.assertEqual(11, len(range_.entries)) + + self.assertEqual(base_line, range_.entries[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=100), range_.entries[10].timestamp) + + self.assertEqual(11, range_.total_results) # full range + + def test_get_multiple_time_series_should_throw_on_missing_name_from_range(self): + document_id = "users/ayende" + + with self.store.open_session() as session: + session.store(User(), document_id) + session.save_changes() + + base_line = datetime(2023, 8, 20, 21, 30) + + time_series_op = TimeSeriesOperation("Heartrate") + + for i in range(11): + time_series_op.append( + TimeSeriesOperation.AppendOperation(base_line + timedelta(minutes=i * 10), [72], "watches/fitbit") + ) + + time_series_batch = TimeSeriesBatchOperation(document_id, time_series_op) + + self.store.operations.send(time_series_batch) + + self.assertRaisesWithMessage( + self.store.operations.send, + ValueError, + "Missing name argument in TimeSeriesRange. 
Name cannot be None or empty", + GetMultipleTimeSeriesOperation(document_id, [TimeSeriesRange(None, base_line, None)]), + ) + + def test_should_throw_on_null_or_empty_ranges(self): + document_id = "users/ayende" + + with self.store.open_session() as session: + session.store(User(), document_id) + session.save_changes() + + base_line = datetime(2023, 8, 20, 21, 30) + + time_series_op = TimeSeriesOperation("Heartrate") + + for i in range(11): + time_series_op.append( + TimeSeriesOperation.AppendOperation(base_line + timedelta(minutes=i * 10), [72], "watches/fitbit") + ) + + time_series_batch = TimeSeriesBatchOperation(document_id, time_series_op) + + self.store.operations.send(time_series_batch) + + with self.assertRaises(ValueError): + self.store.operations.send(GetTimeSeriesOperation(document_id, None)) + + with self.assertRaises(ValueError): + self.store.operations.send(GetMultipleTimeSeriesOperation(document_id, [])) + + def test_get_time_series_should_throw_on_missing_name(self): + document_id = "users/ayende" + + with self.store.open_session() as session: + session.store(User(), document_id) + session.save_changes() + + base_line = datetime(2023, 8, 20, 21, 30) + + time_series_op = TimeSeriesOperation("Heartrate") + + for i in range(11): + time_series_op.append( + TimeSeriesOperation.AppendOperation(base_line + timedelta(minutes=i * 10), [72], "watches/fitbit") + ) + + time_series_batch = TimeSeriesBatchOperation(document_id, time_series_op) + + self.store.operations.send(time_series_batch) + failed = False + try: + GetTimeSeriesOperation(document_id, "", base_line, base_line + timedelta(days=3650)) + except ValueError as ex: + failed = True + self.assertIn("Timeseries cannot be None or empty", ex.args[0]) + + self.assertTrue(failed) + + def test_can_delete_large_range(self): + document_id = "foo/bar" + base_line = datetime(2023, 8, 20, 0, 0) - timedelta(seconds=1) + + with self.store.open_session() as session: + session.store(User(), document_id) + tsf = 
session.time_series_for(document_id, "BloodPressure") + + for j in range(1, 10000, 1): + offset = j * 10 + time = base_line + timedelta(seconds=offset) + + tsf.append(time, [j], "watches/apple") + + session.save_changes() + + raw_query = ( + "declare timeseries blood_pressure(doc)\n" + " {\n" + " from doc.BloodPressure between $start and $end\n" + " group by 1h\n" + " select min(), max(), avg(), first(), last()\n" + " }\n" + " from Users as p\n" + " select blood_pressure(p) as blood_pressure" + ) + + with self.store.open_session() as session: + query = ( + session.advanced.raw_query(raw_query, RawQueryResult) + .add_parameter("start", base_line) + .add_parameter("end", base_line + timedelta(days=1)) + ) + + result = list(query) + + self.assertEqual(1, len(result)) + + agg = result[0] + + blood_pressure = agg.blood_pressure + count = sum(map(lambda x: x.count[0], blood_pressure.results)) + self.assertEqual(8640, count) + self.assertEqual(blood_pressure.count, count) + self.assertEqual(24, len(blood_pressure.results)) + + for index in range(len(blood_pressure.results)): + item = blood_pressure.results[index] + self.assertEqual(360, item.count[0]) + self.assertEqual(index * 360 + 180 + 0.5, item.average[0]) + self.assertEqual((index + 1) * 360, item.max[0]) + self.assertEqual(index * 360 + 1, item.min[0]) + self.assertEqual(index * 360 + 1, item.first[0]) + self.assertEqual((index + 1) * 360, item.last[0]) + + with self.store.open_session() as session: + tsf = session.time_series_for(document_id, "BloodPressure") + tsf.delete(base_line + timedelta(seconds=3600), base_line + timedelta(seconds=3600 * 10)) # remove 9 hours + session.save_changes() + + session_options = SessionOptions(no_caching=True) + with self.store.open_session(session_options=session_options) as session: + query = ( + session.advanced.raw_query(raw_query, RawQueryResult) + .add_parameter("start", base_line) + .add_parameter("end", base_line + timedelta(days=1)) + ) + result = list(query) + agg = 
result[0] + blood_pressure = agg.blood_pressure + count = sum(map(lambda x: x.count[0], blood_pressure.results)) + self.assertEqual(5399, count) + self.assertEqual(blood_pressure.count, count) + self.assertEqual(15, len(blood_pressure.results)) + + index = 0 + + item = blood_pressure.results[index] + self.assertEqual(359, item.count[0]) + self.assertEqual(180, item.average[0]) + self.assertEqual(359, item.max[0]) + self.assertEqual(1, item.min[0]) + self.assertEqual(1, item.first[0]) + self.assertEqual(359, item.last[0]) + + for index in range(1, len(blood_pressure.results)): + item = blood_pressure.results[index] + real_index = index + 9 + + self.assertEqual(360, item.count[0]) + self.assertEqual(real_index * 360 + 180 + 0.5, item.average[0]) + self.assertEqual((real_index + 1) * 360, item.max[0]) + self.assertEqual(real_index * 360 + 1, item.min[0]) + self.assertEqual(real_index * 360 + 1, item.first[0]) + self.assertEqual((real_index + 1) * 360, item.last[0]) + + def test_should_throw_on_attempt_to_create_time_series_on_missing_document(self): + base_line = datetime(2023, 8, 20, 21, 30) + time_series_op = TimeSeriesOperation("Heartrate") + time_series_op.append( + TimeSeriesOperation.AppendOperation(base_line + timedelta(seconds=1), [59], "watches/fitbit") + ) + + time_series_batch = TimeSeriesBatchOperation("users/ayende", time_series_op) + self.assertRaisesWithMessage( + self.store.operations.send, + RuntimeError, + "Document 'users/ayende' does not exist. 
Cannot operate on time series of a missing document", + time_series_batch, + ) + + def test_can_get_non_existing_range(self): + with self.store.open_session() as session: + session.store(User(), "users/ayende") + session.save_changes() + + base_line = RavenTestHelper.utc_today() + time_series_op = TimeSeriesOperation("Heartrate") + time_series_op.append( + TimeSeriesOperation.AppendOperation(base_line + timedelta(seconds=1), [59], "waches/fitbit") + ) + + time_series_batch = TimeSeriesBatchOperation("users/ayende", time_series_op) + self.store.operations.send(time_series_batch) + + time_series_range_result = self.store.operations.send( + GetTimeSeriesOperation( + "users/ayende", "Heartrate", base_line - timedelta(days=62), base_line - timedelta(days=31) + ) + ) + self.assertEqual(0, len(time_series_range_result.entries)) + + def test_can_create_and_get_simple_time_series_using_store_operations(self): + document_id = "users/ayende" + with self.store.open_session() as session: + session.store(User(), document_id) + session.save_changes() + + base_line = RavenTestHelper.utc_this_month() + + append1 = TimeSeriesOperation.AppendOperation(base_line + timedelta(seconds=1), [59], "watches/fitbit") + + time_series_op = TimeSeriesOperation("Heartrate") + + time_series_op.append(append1) + + time_series_batch = TimeSeriesBatchOperation(document_id, time_series_op) + + self.store.operations.send(time_series_batch) + + time_series_range_result = self.store.operations.send(GetTimeSeriesOperation(document_id, "Heartrate")) + + self.assertEqual(1, len(time_series_range_result.entries)) + + value = time_series_range_result.entries[0] + self.assertEqual(59, value.values[0]) + self.assertEqual("watches/fitbit", value.tag) + self.assertEqual(base_line + timedelta(seconds=1), value.timestamp) + + def test_can_store_and_read_multiple_timestamp_using_store_operations(self): + document_id = "users/ayende" + + with self.store.open_session() as session: + session.store(User(), document_id) 
+ session.save_changes() + + base_line = RavenTestHelper.utc_this_month() + + time_series_op = TimeSeriesOperation("Heartrate") + time_series_op.append( + TimeSeriesOperation.AppendOperation(base_line + timedelta(seconds=1), [59], "watches/fitbit") + ) + time_series_op.append( + TimeSeriesOperation.AppendOperation(base_line + timedelta(seconds=2), [61], "watches/fitbit") + ) + time_series_op.append( + TimeSeriesOperation.AppendOperation(base_line + timedelta(seconds=5), [60], "watches/apple-watch") + ) + + time_series_batch = TimeSeriesBatchOperation(document_id, time_series_op) + + self.store.operations.send(time_series_batch) + + time_series_range_result = self.store.operations.send(GetTimeSeriesOperation(document_id, "Heartrate")) + + self.assertEqual(3, len(time_series_range_result.entries)) + + value = time_series_range_result.entries[0] + self.assertEqual(59, value.values[0]) + self.assertEqual("watches/fitbit", value.tag) + self.assertEqual(base_line + timedelta(seconds=1), value.timestamp) + + value = time_series_range_result.entries[1] + self.assertEqual(61, value.values[0]) + self.assertEqual("watches/fitbit", value.tag) + self.assertEqual(base_line + timedelta(seconds=2), value.timestamp) + + value = time_series_range_result.entries[2] + self.assertEqual(60, value.values[0]) + self.assertEqual("watches/apple-watch", value.tag) + self.assertEqual(base_line + timedelta(seconds=5), value.timestamp) + + def test_can_append_and_remove_timestamp_in_single_batch(self): + document_id = "users/ayende" + + with self.store.open_session() as session: + session.store(User(), document_id) + session.save_changes() + + base_line = RavenTestHelper.utc_this_month() + + time_series_op = TimeSeriesOperation("Heartrate") + time_series_op.append( + TimeSeriesOperation.AppendOperation(base_line + timedelta(seconds=1), [59], "watches/fitbit") + ) + time_series_op.append( + TimeSeriesOperation.AppendOperation(base_line + timedelta(seconds=2), [61], "watches/fitbit") + ) + 
time_series_op.append( + TimeSeriesOperation.AppendOperation(base_line + timedelta(seconds=3), [61.5], "watches/fitbit") + ) + + time_series_batch = TimeSeriesBatchOperation(document_id, time_series_op) + + self.store.operations.send(time_series_batch) + + time_series_range_result = self.store.operations.send( + GetTimeSeriesOperation(document_id, "Heartrate", None, None) + ) + + self.assertEqual(3, len(time_series_range_result.entries)) + + time_series_op = TimeSeriesOperation("Heartrate") + time_series_op.append( + TimeSeriesOperation.AppendOperation(base_line + timedelta(seconds=4), [60], "watches/fitbit") + ) + time_series_op.append( + TimeSeriesOperation.AppendOperation(base_line + timedelta(seconds=5), [62.5], "watches/fitbit") + ) + time_series_op.append( + TimeSeriesOperation.AppendOperation(base_line + timedelta(seconds=6), [62], "watches/fitbit") + ) + + time_series_op.delete( + TimeSeriesOperation.DeleteOperation(base_line + timedelta(seconds=2), base_line + timedelta(seconds=3)) + ) + + time_series_batch = TimeSeriesBatchOperation(document_id, time_series_op) + + self.store.operations.send(time_series_batch) + + time_series_range_result = self.store.operations.send( + GetTimeSeriesOperation(document_id, "Heartrate", None, None) + ) + + self.assertEqual(4, len(time_series_range_result.entries)) + + value = time_series_range_result.entries[0] + self.assertEqual(59, value.values[0]) + self.assertEqual(base_line + timedelta(seconds=1), value.timestamp) + + value = time_series_range_result.entries[1] + self.assertEqual(60, value.values[0]) + self.assertEqual(base_line + timedelta(seconds=4), value.timestamp) + + value = time_series_range_result.entries[2] + self.assertEqual(62.5, value.values[0]) + self.assertEqual(base_line + timedelta(seconds=5), value.timestamp) + + value = time_series_range_result.entries[3] + self.assertEqual(62, value.values[0]) + self.assertEqual(base_line + timedelta(seconds=6), value.timestamp) diff --git 
a/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_ranges_cache.py b/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_ranges_cache.py index c80ea43e..c0f42cf6 100644 --- a/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_ranges_cache.py +++ b/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_ranges_cache.py @@ -73,3 +73,527 @@ def test_should_get_time_series_value_from_cache(self): self.assertEqual(base_line + timedelta(minutes=1), val.timestamp) self.assertEqual(1, session.advanced.number_of_requests) + + def test_merge_time_series_range_in_cache(self): + base_line = datetime(2023, 8, 20, 21, 30) + doc_id = "users/ayende" + ts_name = "Heartrate" + tag = "watches/fitbit" + + with self.store.open_session() as session: + session.store(User(name="Oren"), doc_id) + session.save_changes() + + with self.store.open_session() as session: + tsf = session.time_series_for(doc_id, ts_name) + for i in range(360): + tsf.append_single(base_line + timedelta(seconds=i * 10), 6, tag) + session.save_changes() + + with self.store.open_session() as session: + vals = session.time_series_for(doc_id, ts_name).get( + base_line + timedelta(minutes=2), base_line + timedelta(minutes=10) + ) + self.assertEqual(1, session.advanced.number_of_requests) + self.assertEqual(49, len(vals)) + self.assertEqual(base_line + timedelta(minutes=2), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=10), vals[48].timestamp) + + # should load partial range from cache + vals = session.time_series_for(doc_id, ts_name).get( + base_line + timedelta(minutes=5), base_line + timedelta(minutes=7) + ) + + self.assertEqual(1, session.advanced.number_of_requests) + self.assertEqual(13, len(vals)) + self.assertEqual(base_line + timedelta(minutes=5), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=7), vals[12].timestamp) + + # should go to server + vals = 
session.time_series_for(doc_id, ts_name).get( + base_line + timedelta(minutes=40), base_line + timedelta(minutes=50) + ) + + self.assertEqual(2, session.advanced.number_of_requests) + self.assertEqual(61, len(vals)) + self.assertEqual(base_line + timedelta(minutes=40), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=50), vals[60].timestamp) + + cache = session.time_series_by_doc_id.get(doc_id) + self.assertIsNotNone(cache) + ranges = cache.get(ts_name) + self.assertIsNotNone(ranges) + self.assertEqual(2, len(ranges)) + + self.assertEqual(base_line + timedelta(minutes=2), ranges[0].from_date) + self.assertEqual(base_line + timedelta(minutes=10), ranges[0].to_date) + + self.assertEqual(base_line + timedelta(minutes=40), ranges[1].from_date) + self.assertEqual(base_line + timedelta(minutes=50), ranges[1].to_date) + + # should go to server to get [0, 2] and merge it into existing [2, 10] + vals = session.time_series_for(doc_id, ts_name).get(base_line, base_line + timedelta(minutes=5)) + + self.assertEqual(3, session.advanced.number_of_requests) + + self.assertEqual(31, len(vals)) + self.assertEqual(base_line, vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=5), vals[30].timestamp) + + self.assertEqual(2, len(ranges)) + + self.assertEqual(base_line + timedelta(minutes=0), ranges[0].from_date) + self.assertEqual(base_line + timedelta(minutes=10), ranges[0].to_date) + + self.assertEqual(base_line + timedelta(minutes=40), ranges[1].from_date) + self.assertEqual(base_line + timedelta(minutes=50), ranges[1].to_date) + + # should go to server to get [10, 16] and merge it into existing [0, 10] + vals = session.time_series_for(doc_id, ts_name).get( + base_line + timedelta(minutes=8), base_line + timedelta(minutes=16) + ) + + self.assertEqual(4, session.advanced.number_of_requests) + + self.assertEqual(49, len(vals)) + self.assertEqual(base_line + timedelta(minutes=8), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=16), 
vals[48].timestamp) + + self.assertEqual(2, len(ranges)) + + self.assertEqual(base_line + timedelta(minutes=0), ranges[0].from_date) + self.assertEqual(base_line + timedelta(minutes=16), ranges[0].to_date) + self.assertEqual(base_line + timedelta(minutes=40), ranges[1].from_date) + self.assertEqual(base_line + timedelta(minutes=50), ranges[1].to_date) + + # should go to server to get range [17, 19] + # and add it between [10, 16] and [40, 50] + + vals = session.time_series_for(doc_id, ts_name).get( + base_line + timedelta(minutes=17), base_line + timedelta(minutes=19) + ) + + self.assertEqual(5, session.advanced.number_of_requests) + + self.assertEqual(13, len(vals)) + self.assertEqual(base_line + timedelta(minutes=17), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=19), vals[12].timestamp) + + self.assertEqual(base_line + timedelta(minutes=0), ranges[0].from_date) + self.assertEqual(base_line + timedelta(minutes=16), ranges[0].to_date) + self.assertEqual(base_line + timedelta(minutes=17), ranges[1].from_date) + self.assertEqual(base_line + timedelta(minutes=19), ranges[1].to_date) + self.assertEqual(base_line + timedelta(minutes=40), ranges[2].from_date) + self.assertEqual(base_line + timedelta(minutes=50), ranges[2].to_date) + + # should go to server to get range [19, 40] + # and merge the result with existing ranges [17, 19] and [40, 50] + # into single range [17, 50] + + vals = session.time_series_for(doc_id, ts_name).get( + base_line + timedelta(minutes=18), base_line + timedelta(minutes=48) + ) + self.assertEqual(6, session.advanced.number_of_requests) + + self.assertEqual(181, len(vals)) + self.assertEqual(base_line + timedelta(minutes=18), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=48), vals[180].timestamp) + + self.assertEqual(2, len(ranges)) + + self.assertEqual(base_line + timedelta(minutes=0), ranges[0].from_date) + self.assertEqual(base_line + timedelta(minutes=16), ranges[0].to_date) + 
self.assertEqual(base_line + timedelta(minutes=17), ranges[1].from_date) + self.assertEqual(base_line + timedelta(minutes=50), ranges[1].to_date) + + # should get to server to get range [16, 17] + # and merge the result with existing ranges [0, 16] and [17, 50] + # into single range [0, 50] + + vals = session.time_series_for(doc_id, ts_name).get( + base_line + timedelta(minutes=12), base_line + timedelta(minutes=22) + ) + + self.assertEqual(7, session.advanced.number_of_requests) + + self.assertEqual(61, len(vals)) + self.assertEqual(base_line + timedelta(minutes=12), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=22), vals[60].timestamp) + + self.assertEqual(1, len(ranges)) + + self.assertEqual(base_line + timedelta(minutes=0), ranges[0].from_date) + self.assertEqual(base_line + timedelta(minutes=50), ranges[0].to_date) + + def test_can_handle_ranges_with_no_values(self): + base_line = datetime(2023, 8, 20, 21, 30) + doc_id = "users/ayende" + ts_name = "Heartrate" + tag = "watches/fitbit" + + with self.store.open_session() as session: + session.store(User(name="Oren"), doc_id) + session.save_changes() + + with self.store.open_session() as session: + tsf = session.time_series_for(doc_id, ts_name) + for i in range(360): + tsf.append_single(base_line + timedelta(seconds=i * 10), 60, tag) + session.save_changes() + + with self.store.open_session() as session: + vals = session.time_series_for(doc_id, ts_name).get( + base_line - timedelta(hours=2), base_line - timedelta(hours=1) + ) + self.assertEqual(0, len(vals)) + self.assertEqual(1, session.advanced.number_of_requests) + + # should not go to server + vals = session.time_series_for(doc_id, ts_name).get( + base_line - timedelta(hours=2), base_line - timedelta(hours=1) + ) + + self.assertEqual(0, len(vals)) + self.assertEqual(1, session.advanced.number_of_requests) + + # should not go to server + vals = session.time_series_for(doc_id, ts_name).get( + base_line - timedelta(minutes=90), base_line - 
timedelta(minutes=70) + ) + + self.assertEqual(0, len(vals)) + self.assertEqual(1, session.advanced.number_of_requests) + + # should go to server to get[-60, 1] and merge with [-120, -60] + vals = session.time_series_for(doc_id, ts_name).get( + base_line - timedelta(hours=1), base_line + timedelta(minutes=1) + ) + + self.assertEqual(7, len(vals)) + self.assertEqual(base_line, vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=1), vals[6].timestamp) + self.assertEqual(2, session.advanced.number_of_requests) + + cache = session.time_series_by_doc_id.get("users/ayende") + ranges = cache[ts_name] + + self.assertIsNotNone(ranges) + self.assertEqual(1, len(ranges)) + + self.assertEqual(base_line - timedelta(hours=2), ranges[0].from_date) + self.assertEqual(base_line + timedelta(minutes=1), ranges[0].to_date) + + def test_should_merge_time_series_ranges_in_cache_2(self): + base_line = datetime(2023, 8, 20, 21, 30) + doc_id = "users/ayende" + ts_name = "Heartrate" + tag = "watches/fitbit" + + with self.store.open_session() as session: + session.store(User(name="Oren"), doc_id) + session.save_changes() + + with self.store.open_session() as session: + tsf = session.time_series_for(doc_id, ts_name) + for i in range(360): + tsf.append_single(base_line + timedelta(seconds=i * 10), 60, tag) + + tsf = session.time_series_for(doc_id, ts_name + "2") + tsf.append_single(base_line + timedelta(hours=1), 70, tag) + tsf.append_single(base_line + timedelta(minutes=90), 75, tag) + session.save_changes() + + with self.store.open_session() as session: + vals = session.time_series_for(doc_id, ts_name).get( + base_line + timedelta(minutes=2), base_line + timedelta(minutes=10) + ) + self.assertEqual(1, session.advanced.number_of_requests) + + self.assertEqual(49, len(vals)) + self.assertEqual(base_line + timedelta(minutes=2), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=10), vals[48].timestamp) + + # should go to server + vals = 
session.time_series_for(doc_id, ts_name).get( + base_line + timedelta(minutes=22), base_line + timedelta(minutes=32) + ) + self.assertEqual(2, session.advanced.number_of_requests) + + self.assertEqual(61, len(vals)) + self.assertEqual(base_line + timedelta(minutes=22), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=32), vals[60].timestamp) + + # should go to server + vals = session.time_series_for(doc_id, ts_name).get( + base_line + timedelta(minutes=1), base_line + timedelta(minutes=11) + ) + self.assertEqual(3, session.advanced.number_of_requests) + + self.assertEqual(61, len(vals)) + self.assertEqual(base_line + timedelta(minutes=1), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=11), vals[60].timestamp) + + cache = session.time_series_by_doc_id.get(doc_id) + + ranges = cache[ts_name] + + self.assertIsNotNone(ranges) + self.assertEqual(2, len(ranges)) + + self.assertEqual(base_line + timedelta(minutes=1), ranges[0].from_date) + self.assertEqual(base_line + timedelta(minutes=11), ranges[0].to_date) + self.assertEqual(base_line + timedelta(minutes=22), ranges[1].from_date) + self.assertEqual(base_line + timedelta(minutes=32), ranges[1].to_date) + + # should go to server to get [32, 35] and merge with [22, 32] + vals = session.time_series_for(doc_id, ts_name).get( + base_line + timedelta(minutes=25), base_line + timedelta(minutes=35) + ) + self.assertEqual(4, session.advanced.number_of_requests) + + self.assertEqual(61, len(vals)) + self.assertEqual(base_line + timedelta(minutes=25), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=35), vals[60].timestamp) + + self.assertEqual(2, len(ranges)) + + self.assertEqual(base_line + timedelta(minutes=1), ranges[0].from_date) + self.assertEqual(base_line + timedelta(minutes=11), ranges[0].to_date) + self.assertEqual(base_line + timedelta(minutes=22), ranges[1].from_date) + self.assertEqual(base_line + timedelta(minutes=35), ranges[1].to_date) + + # should go to 
server to get [20, 22] and [35, 40] + # and merge them with [22, 35] into a single range [20, 40] + vals = session.time_series_for(doc_id, ts_name).get( + base_line + timedelta(minutes=20), base_line + timedelta(minutes=40) + ) + self.assertEqual(5, session.advanced.number_of_requests) + + self.assertEqual(121, len(vals)) + self.assertEqual(base_line + timedelta(minutes=20), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=40), vals[120].timestamp) + + self.assertEqual(2, len(ranges)) + + self.assertEqual(base_line + timedelta(minutes=1), ranges[0].from_date) + self.assertEqual(base_line + timedelta(minutes=11), ranges[0].to_date) + self.assertEqual(base_line + timedelta(minutes=20), ranges[1].from_date) + self.assertEqual(base_line + timedelta(minutes=40), ranges[1].to_date) + + # should go to server to get [15, 20] and merge with [20, 40] + vals = session.time_series_for(doc_id, ts_name).get( + base_line + timedelta(minutes=15), base_line + timedelta(minutes=35) + ) + self.assertEqual(6, session.advanced.number_of_requests) + + self.assertEqual(121, len(vals)) + self.assertEqual(base_line + timedelta(minutes=15), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=35), vals[120].timestamp) + + self.assertEqual(2, len(ranges)) + + self.assertEqual(base_line + timedelta(minutes=1), ranges[0].from_date) + self.assertEqual(base_line + timedelta(minutes=11), ranges[0].to_date) + self.assertEqual(base_line + timedelta(minutes=15), ranges[1].from_date) + self.assertEqual(base_line + timedelta(minutes=40), ranges[1].to_date) + + # should go to server and add new cache entry for Heartrate2 + vals = session.time_series_for(doc_id, ts_name + "2").get(base_line, base_line + timedelta(hours=2)) + self.assertEqual(7, session.advanced.number_of_requests) + + self.assertEqual(2, len(vals)) + self.assertEqual(base_line + timedelta(hours=1), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=90), vals[1].timestamp) + + ranges2 = 
cache.get(ts_name + "2") + self.assertEqual(1, len(ranges2)) + self.assertEqual(base_line, ranges2[0].from_date) + self.assertEqual(base_line + timedelta(hours=2), ranges2[0].to_date) + + # should not go to server + vals = session.time_series_for(doc_id, ts_name + "2").get( + base_line + timedelta(minutes=30), base_line + timedelta(minutes=100) + ) + + self.assertEqual(7, session.advanced.number_of_requests) + + self.assertEqual(2, len(vals)) + self.assertEqual(base_line + timedelta(minutes=60), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=90), vals[1].timestamp) + + # should go to server + vals = session.time_series_for(doc_id, ts_name).get( + base_line + timedelta(minutes=42), base_line + timedelta(minutes=43) + ) + self.assertEqual(8, session.advanced.number_of_requests) + + self.assertEqual(7, len(vals)) + + self.assertEqual(base_line + timedelta(minutes=42), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=43), vals[6].timestamp) + + self.assertEqual(3, len(ranges)) + + self.assertEqual(base_line + timedelta(minutes=1), ranges[0].from_date) + self.assertEqual(base_line + timedelta(minutes=11), ranges[0].to_date) + self.assertEqual(base_line + timedelta(minutes=15), ranges[1].from_date) + self.assertEqual(base_line + timedelta(minutes=40), ranges[1].to_date) + self.assertEqual(base_line + timedelta(minutes=42), ranges[2].from_date) + self.assertEqual(base_line + timedelta(minutes=43), ranges[2].to_date) + + # should go to server and to get the missing parts and merge all ranges [0, 45] + vals = session.time_series_for(doc_id, ts_name).get(base_line, base_line + timedelta(minutes=45)) + self.assertEqual(9, session.advanced.number_of_requests) + + self.assertEqual(271, len(vals)) + + self.assertEqual(base_line, vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=45), vals[270].timestamp) + + ranges = cache[ts_name] + self.assertIsNotNone(ranges) + self.assertEqual(1, len(ranges)) + + 
self.assertEqual(base_line + timedelta(minutes=0), ranges[0].from_date) + self.assertEqual(base_line + timedelta(minutes=45), ranges[0].to_date) + + def test_should_merge_time_series_range_in_cache_3(self): + base_line = datetime(2023, 8, 20, 21, 30) + doc_id = "users/ayende" + ts_name = "Heartrate" + tag = "watches/fitbit" + + with self.store.open_session() as session: + session.store(User(name="Oren"), doc_id) + session.save_changes() + + with self.store.open_session() as session: + tsf = session.time_series_for(doc_id, ts_name) + for i in range(360): + tsf.append_single(base_line + timedelta(seconds=i * 10), 60, tag) + + tsf = session.time_series_for(doc_id, ts_name) + + tsf.append_single(base_line + timedelta(hours=1), 70, tag) + tsf.append_single(base_line + timedelta(minutes=90), 75, tag) + + session.save_changes() + + with self.store.open_session() as session: + vals = session.time_series_for(doc_id, ts_name).get( + base_line + timedelta(minutes=1), base_line + timedelta(minutes=2) + ) + + self.assertEqual(7, len(vals)) + self.assertEqual(base_line + timedelta(minutes=1), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=2), vals[6].timestamp) + + vals = session.time_series_for(doc_id, ts_name).get( + base_line + timedelta(minutes=5), base_line + timedelta(minutes=6) + ) + + self.assertEqual(2, session.advanced.number_of_requests) + + self.assertEqual(7, len(vals)) + self.assertEqual(base_line + timedelta(minutes=5), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=6), vals[6].timestamp) + + cache = session.time_series_by_doc_id.get(doc_id) + ranges = cache.get(ts_name) + self.assertIsNotNone(ranges) + self.assertEqual(2, len(ranges)) + + self.assertEqual(base_line + timedelta(minutes=1), ranges[0].from_date) + self.assertEqual(base_line + timedelta(minutes=2), ranges[0].to_date) + self.assertEqual(base_line + timedelta(minutes=5), ranges[1].from_date) + self.assertEqual(base_line + timedelta(minutes=6), 
ranges[1].to_date) + + # should go to server to get [2, 3] and merge with [1, 2] + + vals = session.time_series_for(doc_id, ts_name).get( + base_line + timedelta(minutes=2), base_line + timedelta(minutes=3) + ) + self.assertEqual(3, session.advanced.number_of_requests) + + self.assertEqual(7, len(vals)) + self.assertEqual(base_line + timedelta(minutes=2), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=3), vals[6].timestamp) + + self.assertEqual(2, len(ranges)) + + self.assertEqual(base_line + timedelta(minutes=1), ranges[0].from_date) + self.assertEqual(base_line + timedelta(minutes=3), ranges[0].to_date) + self.assertEqual(base_line + timedelta(minutes=5), ranges[1].from_date) + self.assertEqual(base_line + timedelta(minutes=6), ranges[1].to_date) + + # should go to server to get [4, 5] and merge with [5, 6] + vals = session.time_series_for(doc_id, ts_name).get( + base_line + timedelta(minutes=4), base_line + timedelta(minutes=5) + ) + self.assertEqual(4, session.advanced.number_of_requests) + + self.assertEqual(7, len(vals)) + self.assertEqual(base_line + timedelta(minutes=4), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=5), vals[6].timestamp) + + self.assertEqual(2, len(ranges)) + + self.assertEqual(base_line + timedelta(minutes=1), ranges[0].from_date) + self.assertEqual(base_line + timedelta(minutes=3), ranges[0].to_date) + self.assertEqual(base_line + timedelta(minutes=4), ranges[1].from_date) + self.assertEqual(base_line + timedelta(minutes=6), ranges[1].to_date) + + # should go to server to get [3, 4] and merge all ranges into [1, 6] + + vals = session.time_series_for(doc_id, ts_name).get( + base_line + timedelta(minutes=3), base_line + timedelta(minutes=4) + ) + + self.assertEqual(5, session.advanced.number_of_requests) + + self.assertEqual(7, len(vals)) + self.assertEqual(base_line + timedelta(minutes=3), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=4), vals[6].timestamp) + + 
self.assertEqual(1, len(ranges)) + + self.assertEqual(base_line + timedelta(minutes=1), ranges[0].from_date) + self.assertEqual(base_line + timedelta(minutes=6), ranges[0].to_date) + + def test_should_get_partial_range_from_cache_2(self): + start = 5 + page_size = 10 + base_line = datetime(2023, 8, 20, 21, 30) + doc_id = "users/ayende" + ts_name = "Heartrate" + tag = "watches/fitbit" + + with self.store.open_session() as session: + session.store(User(name="Oren"), doc_id) + session.time_series_for(doc_id, ts_name).append_single(base_line + timedelta(minutes=1), 59, tag) + session.time_series_for(doc_id, ts_name).append_single(base_line + timedelta(minutes=2), 60, tag) + session.time_series_for(doc_id, ts_name).append_single(base_line + timedelta(minutes=3), 61, tag) + session.save_changes() + + with self.store.open_session() as session: + val = session.time_series_for(doc_id, ts_name).get( + base_line + timedelta(days=2), base_line + timedelta(days=3), start, page_size + ) + self.assertEqual(0, len(val)) + self.assertEqual(1, session.advanced.number_of_requests) + val = session.time_series_for(doc_id, ts_name).get( + base_line + timedelta(days=1), base_line + timedelta(days=4), start, page_size + ) + self.assertEqual(0, len(val)) + self.assertEqual(2, session.advanced.number_of_requests) + + with self.store.open_session() as session: + val = session.time_series_for(doc_id, ts_name).get(start=start, page_size=page_size) + self.assertEqual(0, len(val)) + self.assertEqual(1, session.advanced.number_of_requests) + + val = session.time_series_for(doc_id, ts_name).get( + base_line + timedelta(days=1), base_line + timedelta(days=4), start, page_size + ) + self.assertEqual(0, len(val)) + self.assertEqual(1, session.advanced.number_of_requests) diff --git a/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_raw_query.py b/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_raw_query.py new file mode 100644 index 
00000000..856444bf --- /dev/null +++ b/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_raw_query.py @@ -0,0 +1,324 @@ +from __future__ import annotations +from datetime import datetime, timedelta +from typing import Any, Dict + +from ravendb import AbstractIndexCreationTask +from ravendb.documents.queries.time_series import TimeSeriesAggregationResult, TimeSeriesRawResult +from ravendb.tests.test_base import TestBase + +document_id = "users/ayende" +base_line = datetime(2023, 8, 20, 21, 00) +base_line2 = base_line - timedelta(days=1) +ts_name_1 = "Heartrate" +ts_name_2 = "BloodPressure" +tag1 = "watches/fitbit" +tag2 = "watches/apple" + + +class PeopleIndex(AbstractIndexCreationTask): + def __init__(self): + super(PeopleIndex, self).__init__() + self.map = "from p in docs.People select new { p.age }" + + @property + def index_name(self) -> str: + return "People" + + +class Event: + def __init__(self, start: datetime = None, end: datetime = None, description: str = None): + self.start = start + self.end = end + self.description = description + + @classmethod + def from_json(cls, json_dict: Dict[str, Any]) -> Event: + return cls(json_dict["start"], json_dict["end"], json_dict["description"]) + + +class NestedClass: + def __init__(self, event: Event = None, accuracy: float = None): + self.event = event + self.accuracy = accuracy + + @classmethod + def from_json(cls, json_dict: Dict[str, Any]) -> NestedClass: + return cls(Event.from_json(json_dict["event"]), json_dict["accuracy"]) + + +class AdditionalData: + def __init__(self, nested_class: NestedClass = None): + self.nested_class = nested_class + + @classmethod + def from_json(cls, json_dict: Dict[str, Any]) -> AdditionalData: + return cls(NestedClass.from_json(json_dict["nested_class"])) + + +class Person: + def __init__( + self, + name: str = None, + age: int = None, + works_at: str = None, + event: str = None, + additional_data: AdditionalData = None, + ): + self.name = name + 
self.age = age + self.works_at = works_at + self.event = event + self.additional_data = additional_data + + @classmethod + def from_json(cls, json_dict: Dict[str, Any]) -> Person: + return cls( + json_dict["name"], + json_dict["age"], + json_dict["works_at"], + json_dict["event"], + AdditionalData.from_json(json_dict["additional_data"]), + ) + + +class RawQueryResult: + def __init__( + self, + heart_rate: TimeSeriesAggregationResult = None, + blood_pressure: TimeSeriesAggregationResult = None, + name: str = None, + ): + self.heart_rate = heart_rate + self.blood_pressure = blood_pressure + self.name = name + + @classmethod + def from_json(cls, json_dict: Dict[str, Any]) -> RawQueryResult: + return cls( + TimeSeriesAggregationResult.from_json(json_dict["heart_rate"]) if "heart_rate" in json_dict else None, + TimeSeriesAggregationResult.from_json(json_dict["blood_pressure"]) + if "blood_pressure" in json_dict + else None, + json_dict["name"] if "name" in json_dict else None, + ) + + +class TestTimeSeriesRawQuery(TestBase): + def setUp(self): + super(TestTimeSeriesRawQuery, self).setUp() + + def test_can_query_time_series_aggregation_declare_syntax_multiple_series(self): + with self.store.open_session() as session: + for i in range(4): + id_ = f"people/{i}" + person = Person("Oren", i * 30) + + session.store(person, id_) + + tsf = session.time_series_for(id_, ts_name_1) + + tsf.append_single(base_line + timedelta(minutes=61), 59, tag1) + tsf.append_single(base_line + timedelta(minutes=62), 79, tag1) + tsf.append_single(base_line + timedelta(minutes=63), 69, tag1) + + tsf = session.time_series_for(id_, ts_name_2) + + tsf.append_single(base_line + timedelta(minutes=61), 159, tag2) + tsf.append_single(base_line + timedelta(minutes=62), 179, tag2) + tsf.append_single(base_line + timedelta(minutes=63), 168, tag2) + + session.save_changes() + + PeopleIndex().execute(self.store) + self.wait_for_indexing(self.store) + + with self.store.open_session() as session: + query = ( + 
session.advanced.raw_query( + "declare timeseries out(p)\n " + "{\n" + " from p.heartRate between $start and $end\n" + " group by 1h\n" + " select min(), max()\n" + "}\n" + "from index 'People' as p\n" + "where p.age > 49\n" + "select out(p) as heart_rate, p.name", + RawQueryResult, + ) + .add_parameter("start", base_line) + .add_parameter("end", base_line + timedelta(days=1)) + ) + + result = list(query) + + self.assertEqual(2, len(result)) + + for i in range(2): + agg = result[i] + self.assertEqual("Oren", agg.name) + + heartrate = agg.heart_rate + + self.assertEqual(3, heartrate.count) + + self.assertEqual(1, len(heartrate.results)) + + val = heartrate.results[0] + self.assertEqual(59, val.min[0]) + self.assertEqual(79, val.max[0]) + + self.assertEqual(base_line + timedelta(minutes=60), val.from_date) + self.assertEqual(base_line + timedelta(minutes=120), val.to_date) + + def test_can_query_time_series_aggregation_no_select_or_group_by_multiple_values(self): + with self.store.open_session() as session: + for i in range(4): + id_ = f"people/{i}" + person = Person("Oren", i * 30) + + session.store(person, id_) + + tsf = session.time_series_for(id_, ts_name_1) + + tsf.append(base_line + timedelta(minutes=61), [59, 159], tag1) + tsf.append(base_line + timedelta(minutes=62), [79, 179], tag1) + tsf.append_single(base_line + timedelta(minutes=63), 69, tag2) + + tsf.append(base_line + timedelta(days=31, minutes=61), [159, 259], tag1) + tsf.append(base_line + timedelta(days=31, minutes=62), [179], tag2) + tsf.append(base_line + timedelta(days=31, minutes=63), [169, 269], tag1) + + session.save_changes() + + with self.store.open_session() as session: + query = ( + session.advanced.raw_query( + "declare timeseries out(x)\n" + "{\n" + " from x.HeartRate between $start and $end\n" + "}\n" + "from People as doc\n" + "where doc.age > 49\n" + "select out(doc)" + "", + TimeSeriesRawResult, + ) + .add_parameter("start", base_line) + .add_parameter("end", base_line + 
timedelta(days=62)) + ) + + result = list(query) + self.assertEqual(2, len(result)) + + for i in range(2): + agg = result[i] + self.assertEqual(6, len(agg.results)) + + val = agg.results[0] + + self.assertEqual(2, len(val.values)) + self.assertEqual(59, val.values[0]) + self.assertEqual(159, val.values[1]) + + self.assertEqual(tag1, val.tag) + self.assertEqual(base_line + timedelta(minutes=61), val.timestamp) + + val = agg.results[1] + + self.assertEqual(2, len(val.values)) + self.assertEqual(79, val.values[0]) + self.assertEqual(179, val.values[1]) + + self.assertEqual(tag1, val.tag) + self.assertEqual(base_line + timedelta(minutes=62), val.timestamp) + + val = agg.results[2] + + self.assertEqual(1, len(val.values)) + self.assertEqual(69, val.values[0]) + + self.assertEqual(tag2, val.tag) + self.assertEqual(base_line + timedelta(minutes=63), val.timestamp) + + val = agg.results[3] + + self.assertEqual(2, len(val.values)) + self.assertEqual(159, val.values[0]) + self.assertEqual(259, val.values[1]) + + self.assertEqual(tag1, val.tag) + self.assertEqual(base_line + timedelta(minutes=61, days=31), val.timestamp) + + val = agg.results[4] + + self.assertEqual(1, len(val.values)) + self.assertEqual(179, val.values[0]) + + self.assertEqual(tag2, val.tag) + self.assertEqual(base_line + timedelta(minutes=62, days=31), val.timestamp) + + val = agg.results[5] + + self.assertEqual(2, len(val.values)) + self.assertEqual(169, val.values[0]) + self.assertEqual(269, val.values[1]) + + self.assertEqual(tag1, val.tag) + self.assertEqual(base_line + timedelta(minutes=63, days=31), val.timestamp) + + def test_can_query_time_series_aggregation_declare_syntax_with_other_fields(self): + with self.store.open_session() as session: + for i in range(4): + id_ = f"people/{i}" + person = Person("Oren", i * 30) + + session.store(person, id_) + + tsf = session.time_series_for(id_, ts_name_1) + + tsf.append_single(base_line + timedelta(minutes=61), 59, tag1) + tsf.append_single(base_line + 
timedelta(minutes=62), 79, tag1) + tsf.append_single(base_line + timedelta(minutes=63), 69, tag1) + + session.save_changes() + + PeopleIndex().execute(self.store) + self.wait_for_indexing(self.store) + + with self.store.open_session() as session: + query = ( + session.advanced.raw_query( + "declare timeseries out(p)\n" + "{\n" + " from p.HeartRate between $start and $end\n" + " group by 1h\n" + " select min(), max()\n" + "}\n" + "from index 'People' as p\n" + "where p.age > 49\n" + "select out(p) as heart_rate, p.name", + RawQueryResult, + ) + .add_parameter("start", base_line) + .add_parameter("end", base_line + timedelta(days=1)) + ) + + result = list(query) + + self.assertEqual(2, len(result)) + + for i in range(2): + agg = result[i] + self.assertEqual("Oren", agg.name) + heartrate = agg.heart_rate + + self.assertEqual(3, heartrate.count) + self.assertEqual(1, len(heartrate.results)) + + val = heartrate.results[0] + self.assertEqual(59, val.min[0]) + self.assertEqual(79, val.max[0]) + + self.assertEqual(base_line + timedelta(minutes=60), val.from_date) + self.assertEqual(base_line + timedelta(minutes=120), val.to_date) diff --git a/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_session.py b/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_session.py index 50d1aaa9..31f29877 100644 --- a/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_session.py +++ b/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_session.py @@ -631,3 +631,13 @@ def test_can_delete_timestamp(self): self.assertEqual([79], vals[1].values) self.assertEqual("watches/fitbit", vals[1].tag) self.assertEqual(base_line + timedelta(minutes=3), vals[1].timestamp) + + def test_get_all_time_series_names_when_no_time_series(self): + with self.store.open_session() as session: + session.store(User(), "users/karmel") + session.save_changes() + + with self.store.open_session() as 
session: + user = session.load("users/karmel", User) + ts_names = session.advanced.get_time_series_for(user) + self.assertEqual(0, len(ts_names)) diff --git a/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_typed_session.py b/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_typed_session.py index 2cd56756..b9d17aeb 100644 --- a/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_typed_session.py +++ b/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_typed_session.py @@ -3,6 +3,7 @@ from datetime import datetime, timedelta from typing import Dict, Tuple, Optional +from ravendb import GetDatabaseRecordOperation from ravendb.documents.operations.time_series import ( TimeSeriesPolicy, TimeSeriesCollectionConfiguration, @@ -10,13 +11,14 @@ ConfigureTimeSeriesOperation, RawTimeSeriesPolicy, ) -from ravendb.documents.queries.time_series import TimeSeriesRawResult +from ravendb.documents.queries.time_series import TimeSeriesRawResult, TimeSeriesAggregationResult from ravendb.documents.session.time_series import ( ITimeSeriesValuesBindable, TypedTimeSeriesEntry, TypedTimeSeriesRollupEntry, ) from ravendb.infrastructure.entities import User +from ravendb.infrastructure.orders import Company from ravendb.primitives.time_series import TimeValue from ravendb.tests.test_base import TestBase from ravendb.tools.raven_test_helper import RavenTestHelper @@ -40,6 +42,14 @@ def get_time_series_mapping(self) -> Dict[int, Tuple[str, Optional[str]]]: return {0: ("heart_rate", None)} +class HeartRateMeasureWithCustomName(ITimeSeriesValuesBindable): + def __init__(self, value: float): + self.heart_rate = value + + def get_time_series_mapping(self) -> Dict[int, Tuple[str, Optional[str]]]: + return {0: ("heart_rate", "HR")} + + class BigMeasure(ITimeSeriesValuesBindable): def __init__(self, m1, m2, m3, m4, m5, m6): self.measure1 = m1 @@ -85,6 +95,31 @@ def 
get_time_series_mapping(self) -> Dict[int, Tuple[str, Optional[str]]]: } +class StockPriceWithBadAttributes(ITimeSeriesValuesBindable): + def __init__( + self, + open: Optional[float] = None, + close: Optional[float] = None, + high: Optional[float] = None, + low: Optional[float] = None, + volume: Optional[float] = None, + ): + self.open = open + self.close = close + self.high = high + self.low = low + self.volume = volume + + def get_time_series_mapping(self) -> Dict[int, Tuple[str, Optional[str]]]: + return { + 1: ("open", None), + 2: ("close", None), + 3: ("high", None), + 4: ("low", None), + 5: ("volume", None), + } + + class TestTimeSeriesTypedSession(TestBase): def setUp(self): super(TestTimeSeriesTypedSession, self).setUp() @@ -411,3 +446,190 @@ def test_can_work_with_rollup_time_series(self): res = ts.get(now - timedelta(milliseconds=1), now + timedelta(days=1)) self.assertEqual(1, len(res)) self.assertEqual(1, res[0].max.close) + + def test_mapping_needs_to_contain_consecutive_values_starting_from_zero(self): + self.assertRaisesWithMessage( + self.store.time_series.register_type, + RuntimeError, + "The mapping of 'StockPriceWithBadAttributes' must contain consecutive values starting from 0.", + Company, + StockPriceWithBadAttributes, + ) + + def test_can_query_time_series_aggregation_declare_syntax_all_docs_query(self): + base_line = RavenTestHelper.utc_today() + with self.store.open_session() as session: + session.store(User(), document_id) + tsf = session.typed_time_series_for(HeartRateMeasure, document_id) + m = HeartRateMeasure(59) + tsf.append(base_line + timedelta(minutes=61), m, tag1) + m.heart_rate = 79 + tsf.append(base_line + timedelta(minutes=62), m, tag1) + m.heart_rate = 69 + tsf.append(base_line + timedelta(minutes=63), m, tag1) + session.save_changes() + + with self.store.open_session() as session: + query = ( + session.advanced.raw_query( + "declare timeseries out(u)\n" + " {\n" + " from u.HeartRateMeasures between $start and $end\n" + " 
group by 1h\n" + " select min(), max(), first(), last()\n" + " }\n" + " from @all_docs as u\n" + " where id() == 'users/gracjan'\n" + " select out(u)", + TimeSeriesAggregationResult, + ) + .add_parameter("start", base_line) + .add_parameter("end", base_line + timedelta(days=1)) + ) + + agg = query.first().as_typed_result(HeartRateMeasure) + + self.assertEqual(3, agg.count) + self.assertEqual(1, len(agg.results)) + + val = agg.results[0] + self.assertEqual(59, val.first.heart_rate) + self.assertEqual(59, val.min.heart_rate) + + self.assertEqual(69, val.last.heart_rate) + self.assertEqual(79, val.max.heart_rate) + + self.assertEqual(base_line + timedelta(minutes=60), val.from_date) + self.assertEqual(base_line + timedelta(minutes=120), val.to_date) + + def test_can_query_time_series_aggregation_no_select_or_group_by(self): + base_line = RavenTestHelper.utc_today() + with self.store.open_session() as session: + for i in range(1, 4): + id_ = f"people/{i}" + session.store(User(name="Oren", age=i * 30), id_) + tsf = session.typed_time_series_for(HeartRateMeasure, id_) + tsf.append(base_line + timedelta(minutes=61), HeartRateMeasure(59), tag1) + tsf.append(base_line + timedelta(minutes=62), HeartRateMeasure(79), tag1) + tsf.append(base_line + timedelta(minutes=63), HeartRateMeasure(69), tag2) + tsf.append(base_line + timedelta(minutes=61, days=31), HeartRateMeasure(159), tag1) + tsf.append(base_line + timedelta(minutes=62, days=31), HeartRateMeasure(179), tag2) + tsf.append(base_line + timedelta(minutes=63, days=31), HeartRateMeasure(169), tag1) + + session.save_changes() + + with self.store.open_session() as session: + query = ( + session.advanced.raw_query( + "declare timeseries out(x)\n" + "{\n" + " from x.HeartRateMeasures between $start and $end\n" + "}\n" + "from Users as doc\n" + "where doc.age > 49\n" + "select out(doc)", + TimeSeriesRawResult, + ) + .add_parameter("start", base_line) + .add_parameter("end", base_line + timedelta(days=62)) + ) + + result = 
list(query) + + self.assertEqual(2, len(result)) + + for i in range(2): + agg_raw = result[i] + agg = agg_raw.as_typed_result(HeartRateMeasure) + + self.assertEqual(6, len(agg.results)) + + val = agg.results[0] + + self.assertEqual(1, len(val.values)) + self.assertEqual(59, val.value.heart_rate) + self.assertEqual(tag1, val.tag) + self.assertEqual(base_line + timedelta(minutes=61), val.timestamp) + + val = agg.results[1] + self.assertEqual(1, len(val.values)) + self.assertEqual(79, val.value.heart_rate) + self.assertEqual(tag1, val.tag) + self.assertEqual(base_line + timedelta(minutes=62), val.timestamp) + + val = agg.results[2] + self.assertEqual(1, len(val.values)) + self.assertEqual(69, val.value.heart_rate) + self.assertEqual(tag2, val.tag) + self.assertEqual(base_line + timedelta(minutes=63), val.timestamp) + + val = agg.results[3] + self.assertEqual(1, len(val.values)) + self.assertEqual(159, val.value.heart_rate) + self.assertEqual(tag1, val.tag) + self.assertEqual(base_line + timedelta(minutes=61, days=31), val.timestamp) + + val = agg.results[4] + self.assertEqual(1, len(val.values)) + self.assertEqual(179, val.value.heart_rate) + self.assertEqual(tag2, val.tag) + self.assertEqual(base_line + timedelta(minutes=62, days=31), val.timestamp) + + val = agg.results[5] + self.assertEqual(1, len(val.values)) + self.assertEqual(169, val.value.heart_rate) + self.assertEqual(tag1, val.tag) + self.assertEqual(base_line + timedelta(minutes=63, days=31), val.timestamp) + + def test_can_register_time_series_for_other_database(self): + with self.get_document_store() as store2: + self.store.time_series.for_database(store2.database).register_type(User, StockPrice) + self.store.time_series.for_database(store2.database).register("Users", "HeartRateMeasures", ["HeartRate"]) + + updated: TimeSeriesConfiguration = self.store.maintenance.server.send( + GetDatabaseRecordOperation(store2.database) + ).time_series + + self.assertIsNotNone(updated) + + heartrate = 
updated.get_names("users", "HeartRateMeasures") + self.assertEqual(1, len(heartrate)) + self.assertEqual("HeartRate", heartrate[0]) + + stock = updated.get_names("users", "StockPrices") + + self.assertEqual(5, len(stock)) + self.assertEqual("open", stock[0]) + self.assertEqual("close", stock[1]) + self.assertEqual("high", stock[2]) + self.assertEqual("low", stock[3]) + self.assertEqual("volume", stock[4]) + + def test_can_register_time_series(self): + self.store.time_series.register_type(User, StockPrice) + self.store.time_series.register("Users", "HeartRateMeasures", ["heartRate"]) + + updated: TimeSeriesConfiguration = self.store.maintenance.server.send( + GetDatabaseRecordOperation(self.store.database) + ).time_series + + # this method is case-insensitive + heart_rate = updated.get_names("users", "HeartRateMeasures") + self.assertEqual(1, len(heart_rate)) + self.assertEqual("heartRate", heart_rate[0]) + + stock = updated.get_names("users", "StockPrices") + self.assertEqual(5, len(stock)) + self.assertEqual(["open", "close", "high", "low", "volume"], stock) + + def test_can_register_time_series_with_custom_name(self): + self.store.time_series.register_type(User, HeartRateMeasureWithCustomName, "cn") + + updated: TimeSeriesConfiguration = self.store.maintenance.server.send( + GetDatabaseRecordOperation(self.store.database) + ).time_series + + # this method is case-insensitive + heart_rate = updated.get_names("users", "cn") + self.assertEqual(1, len(heart_rate)) + self.assertEqual("HR", heart_rate[0]) diff --git a/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_13100.py b/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_13100.py new file mode 100644 index 00000000..f0d852d9 --- /dev/null +++ b/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_13100.py @@ -0,0 +1,34 @@ +from ravendb import IndexSourceType +from ravendb.documents.indexes.definitions import IndexDefinitionHelper +from ravendb.tests.test_base import TestBase + + +class 
TestRavenDB13100(TestBase): + def setUp(self): + super().setUp() + + def test_can_detect_documents_index_source_method_syntax(self): + map_ = "docs.Users.OrderBy(user => user.Id).Select(user => new { user.Name })" + self.assertEqual(IndexSourceType.DOCUMENTS, IndexDefinitionHelper.detect_static_index_source_type(map_)) + + def test_can_detect_time_series_index_source_method_syntax(self): + map_ = ( + "timeSeries.Companies.SelectMany(ts => ts.Entries, (ts, entry) => new {" + " HeartBeat = entry.Values[0], " + " Date = entry.Timestamp.Date, " + " User = ts.DocumentId " + "});" + ) + self.assertEqual(IndexSourceType.TIME_SERIES, IndexDefinitionHelper.detect_static_index_source_type(map_)) + + def test_can_detect_time_series_index_source_linq_syntax_single_ts(self): + map_ = "from ts in timeSeries.Users" + self.assertEqual(IndexSourceType.TIME_SERIES, IndexDefinitionHelper.detect_static_index_source_type(map_)) + + def test_can_detect_time_series_index_source_linq_syntax_can_strip_white_space(self): + map_ = "\t\t \t from ts \t \t in \t \t timeSeries.Users" + self.assertEqual(IndexSourceType.TIME_SERIES, IndexDefinitionHelper.detect_static_index_source_type(map_)) + + def test_can_detect_time_series_index_source_linq_syntax_all_ts(self): + map_ = "from ts in timeSeries" + self.assertEqual(IndexSourceType.TIME_SERIES, IndexDefinitionHelper.detect_static_index_source_type(map_)) diff --git a/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_13456.py b/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_13456.py index b3368d6f..1aac6ad1 100644 --- a/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_13456.py +++ b/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_13456.py @@ -1,9 +1,9 @@ import unittest -from ravendb.documents.operations.configuration import ClientConfiguration, PutClientConfigurationOperation +from ravendb.documents.operations.configuration.definitions import ClientConfiguration +from 
ravendb.documents.operations.configuration.operations import PutClientConfigurationOperation from ravendb.documents.operations.statistics import GetStatisticsOperation from ravendb.documents.session.misc import SessionOptions, TransactionMode -from ravendb.exceptions.raven_exceptions import RavenException from ravendb.infrastructure.orders import Company from ravendb.tests.test_base import TestBase diff --git a/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_14164.py b/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_14164.py index c19c1734..8d8e96d1 100644 --- a/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_14164.py +++ b/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_14164.py @@ -4,7 +4,7 @@ from ravendb.documents.session.loaders.include import TimeSeriesIncludeBuilder from ravendb.infrastructure.entities import User from ravendb.tests.test_base import TestBase - +from ravendb.tools.raven_test_helper import RavenTestHelper document_id = "users/gracjan" company_id = "companies/1-A" @@ -44,7 +44,7 @@ def test_can_get_time_series_with_include_tag_documents(self): session.save_changes() with self.store.open_session() as session: - get_results = session.time_series_for(document_id, ts_name1).get_include( + get_results = session.time_series_for(document_id, ts_name1).get_with_include( base_line, base_line + timedelta(hours=2), lambda i: i.include_tags() ) @@ -90,7 +90,7 @@ def test_can_get_time_series_with_include_tags_and_parent_document(self): session.save_changes() with self.store.open_session() as session: - get_results = session.time_series_for(document_id, ts_name1).get_include( + get_results = session.time_series_for(document_id, ts_name1).get_with_include( base_line, base_line + timedelta(hours=2), lambda i: i.include_tags().include_document() ) @@ -141,7 +141,7 @@ def test_includes_should_affect_time_series_get_command_etag(self): session.save_changes() with self.store.open_session() as session: - get_results = 
session.time_series_for(document_id, ts_name1).get_include( + get_results = session.time_series_for(document_id, ts_name1).get_with_include( base_line, base_line + timedelta(hours=2), lambda i: i.include_tags() ) @@ -178,7 +178,7 @@ def test_includes_should_affect_time_series_get_command_etag(self): session.save_changes() with self.store.open_session() as session: - get_results = session.time_series_for(document_id, ts_name1).get_include( + get_results = session.time_series_for(document_id, ts_name1).get_with_include( base_line, base_line + timedelta(hours=2), lambda i: i.include_tags() ) @@ -210,7 +210,7 @@ def test_includes_should_affect_time_series_get_command_etag(self): session.save_changes() with self.store.open_session() as session: - get_results = session.time_series_for(document_id, ts_name1).get_include( + get_results = session.time_series_for(document_id, ts_name1).get_with_include( base_line, base_line + timedelta(hours=2), lambda i: i.include_tags() ) self.assertEqual(1, session.advanced.number_of_requests) @@ -256,7 +256,7 @@ def test_can_get_time_series_with_include_cache_not_empty(self): self.assertEqual(base_line + timedelta(hours=1), get_results[-1].timestamp) # get [22:45 - 23:30] with includes - get_results = session.time_series_for(document_id, ts_name1).get_include( + get_results = session.time_series_for(document_id, ts_name1).get_with_include( base_line + timedelta(minutes=75), base_line + timedelta(hours=2), TimeSeriesIncludeBuilder.include_tags ) @@ -304,7 +304,7 @@ def test_can_get_time_series_with_include_tags_when_not_all_entries_have_tags(se session.save_changes() with self.store.open_session() as session: - get_results = session.time_series_for(document_id, ts_name1).get_include( + get_results = session.time_series_for(document_id, ts_name1).get_with_include( base_line, base_line + timedelta(hours=2), lambda i: i.include_tags() ) @@ -333,3 +333,163 @@ def test_can_get_time_series_with_include_tags_when_not_all_entries_have_tags(se 
tag_doc = tag_documents.get("watches/sony") self.assertEqual("Sony", tag_doc.name) self.assertEqual(0.78, tag_doc.accuracy) + + def test_can_get_time_series_with_include_cache_not_empty_2(self): + tags = [tag1, tag2, tag3] + with self.store.open_session() as session: + session.store(User(name="poisson"), document_id) + session.store(Watch("FitBit", 0.855), tags[0]) + session.store(Watch("Apple", 0.9), tags[1]) + session.store(Watch("Sony", 0.78), tags[2]) + session.save_changes() + + with self.store.open_session() as session: + tsf = session.time_series_for(document_id, ts_name1) + for i in range(121): + tag = tags[0 if i < 60 else 1 if i < 90 else 2] + tsf.append_single(base_line + timedelta(minutes=i), i, tag) + + session.save_changes() + + with self.store.open_session() as session: + # get [21:30 - 22:30] + get_results = session.time_series_for(document_id, ts_name1).get(base_line, base_line + timedelta(hours=1)) + + self.assertEqual(1, session.advanced.number_of_requests) + + self.assertEqual(61, len(get_results)) + self.assertEqual(base_line, get_results[0].timestamp) + self.assertEqual(base_line + timedelta(hours=1), get_results[-1].timestamp) + + # get [23:00 - 23:30] with includes + get_results = session.time_series_for(document_id, ts_name1).get( + base_line + timedelta(minutes=90), base_line + timedelta(hours=2) + ) + + self.assertEqual(2, session.advanced.number_of_requests) + + self.assertEqual(31, len(get_results)) + self.assertEqual(base_line + timedelta(minutes=90), get_results[0].timestamp) + self.assertEqual(base_line + timedelta(hours=2), get_results[-1].timestamp) + + # get [22:30 - 22:45] with includes + get_results = session.time_series_for(document_id, ts_name1).get_with_include( + base_line + timedelta(hours=1), + base_line + timedelta(minutes=75), + lambda builder: builder.include_tags(), + ) + + self.assertEqual(3, session.advanced.number_of_requests) + + self.assertEqual(16, len(get_results)) + self.assertEqual(base_line + 
timedelta(hours=1), get_results[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=75), get_results[-1].timestamp) + + # should not go to server + watch = session.load(tags[1], Watch) + self.assertEqual(3, session.advanced.number_of_requests) + + self.assertEqual("Apple", watch.name) + + self.assertEqual(0.9, watch.accuracy) + + # tags[0] and tags[2] should not be in cache + + watch = session.load(tags[0], Watch) + self.assertEqual(4, session.advanced.number_of_requests) + + self.assertEqual("FitBit", watch.name) + self.assertEqual(0.855, watch.accuracy) + + watch = session.load(tags[2], Watch) + self.assertEqual(5, session.advanced.number_of_requests) + self.assertEqual("Sony", watch.name) + self.assertEqual(0.78, watch.accuracy) + + def test_can_get_multiple_ranges_with_includes(self): + tags = [tag1, tag2, tag3] + with self.store.open_session() as session: + session.store(User(name="poisson"), document_id) + session.store(Watch("FitBit", 0.855), tags[0]) + session.store(Watch("Apple", 0.9), tags[1]) + session.store(Watch("Sony", 0.78), tags[2]) + session.save_changes() + + with self.store.open_session() as session: + tsf = session.time_series_for(document_id, ts_name1) + for i in range(121): + tsf.append_single(base_line + timedelta(minutes=i), i, tags[i % 3]) + session.save_changes() + + with self.store.open_session() as session: + # get [21:30 - 22:00] + get_results = session.time_series_for(document_id, ts_name1).get( + base_line, base_line + timedelta(minutes=30) + ) + + self.assertEqual(1, session.advanced.number_of_requests) + + self.assertEqual(31, len(get_results)) + self.assertEqual(base_line, get_results[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=30), get_results[-1].timestamp) + + # get [22:15 - 22:30] + get_results = session.time_series_for(document_id, ts_name1).get( + base_line + timedelta(minutes=45), base_line + timedelta(minutes=60) + ) + + self.assertEqual(2, session.advanced.number_of_requests) + + 
self.assertEqual(16, len(get_results)) + self.assertEqual(base_line + timedelta(minutes=45), get_results[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=60), get_results[-1].timestamp) + + # get [22:15 - 22:30] + get_results = session.time_series_for(document_id, ts_name1).get( + base_line + timedelta(minutes=90), base_line + timedelta(minutes=120) + ) + + self.assertEqual(3, session.advanced.number_of_requests) + + self.assertEqual(31, len(get_results)) + self.assertEqual(base_line + timedelta(minutes=90), get_results[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=120), get_results[-1].timestamp) + + # get multiple ranges with includes + # ask for entire range [00:00 - 02:00] with includes + # this will go to server to get the "missing parts" - [00:30 - 00:45] and [01:00 - 01:30] + + get_results = session.time_series_for(document_id, ts_name1).get_with_include( + base_line, base_line + timedelta(minutes=120), lambda x: x.include_tags().include_document() + ) + + self.assertEqual(4, session.advanced.number_of_requests) + + self.assertEqual(121, len(get_results)) + self.assertEqual(base_line + timedelta(minutes=0), get_results[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=120), get_results[-1].timestamp) + + # should not go to server + user = session.load(document_id, User) + self.assertEqual(4, session.advanced.number_of_requests) + self.assertEqual("poisson", user.name) + + # should not go to server + tag_documents = session.load(tags, Watch) + self.assertEqual(4, session.advanced.number_of_requests) + + # assert tag documents + + self.assertEqual(3, len(tag_documents)) + + tag_doc = tag_documents.get("watches/fitbit") + self.assertEqual("FitBit", tag_doc.name) + self.assertEqual(0.855, tag_doc.accuracy) + + tag_doc = tag_documents.get("watches/apple") + self.assertEqual("Apple", tag_doc.name) + self.assertEqual(0.9, tag_doc.accuracy) + + tag_doc = tag_documents.get("watches/sony") + self.assertEqual("Sony", 
tag_doc.name) + self.assertEqual(0.78, tag_doc.accuracy) diff --git a/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_14919.py b/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_14919.py index cf9d0c42..9212c7c4 100644 --- a/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_14919.py +++ b/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_14919.py @@ -11,9 +11,9 @@ def test_get_documents_command_should_discard_null_ids_post_get(self): with self.store.open_session() as session: ids = [] for i in range(1000): - id = f"users/{i}" - ids.append(id) - session.store(User(), id) + id_ = f"users/{i}" + ids.append(id_) + session.store(User(), id_) session.save_changes() ids.extend([None for _ in range(24)]) @@ -30,9 +30,9 @@ def test_get_documents_command_should_discard_null_ids(self): with self.store.open_session() as session: ids = [] for i in range(100): - id = f"users/{i}" - ids.append(id) - session.store(User(), id) + id_ = f"users/{i}" + ids.append(id_) + session.store(User(), id_) session.save_changes() ids.extend([None for _ in range(24)]) diff --git a/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_15029.py b/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_15029.py new file mode 100644 index 00000000..ac43003f --- /dev/null +++ b/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_15029.py @@ -0,0 +1,44 @@ +from datetime import datetime, timedelta + +from ravendb.documents.queries.time_series import TimeSeriesRawResult +from ravendb.infrastructure.entities import User +from ravendb.tests.test_base import TestBase + +document_id = "users/ayende" +base_line = datetime(2023, 8, 20, 21, 00) +base_line2 = base_line - timedelta(days=1) +ts_name_1 = "Heartrate" +ts_name_2 = "BloodPressure" +tag1 = "watches/fitbit" +tag2 = "watches/apple" + + +class TestRavenDB15029(TestBase): + def setUp(self): + super(TestRavenDB15029, self).setUp() + + def 
test_session_raw_query_should_not_track_time_series_results_as_document(self): + with self.store.open_session() as session: + session.store(User(name="Karmel"), "users/karmel") + session.time_series_for("users/karmel", ts_name_1).append_single(base_line, 60, tag1) + session.save_changes() + + with self.store.open_session() as session: + u = session.load("users/karmel", User) + query = session.advanced.raw_query( + "declare timeseries out()\n" + "{\n" + " from HeartRate\n" + "}\n" + "from Users as u\n" + "where name = 'Karmel'\n" + "select out()", + TimeSeriesRawResult, + ) + + result = query.first() + + self.assertEqual(1, result.count) + self.assertEqual(60.0, result.results[0].value) + self.assertEqual(base_line, result.results[0].timestamp) + self.assertEqual(tag1, result.results[0].tag) diff --git a/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_15246.py b/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_15246.py new file mode 100644 index 00000000..5e2f995e --- /dev/null +++ b/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_15246.py @@ -0,0 +1,322 @@ +from datetime import timedelta + +from ravendb.documents.operations.time_series import GetMultipleTimeSeriesOperation +from ravendb.documents.session.time_series import TimeSeriesRange +from ravendb.primitives.constants import int_max +from ravendb.tests.test_base import TestBase, User +from ravendb.tools.raven_test_helper import RavenTestHelper + + +class TestRavenDB15246(TestBase): + def setUp(self): + super().setUp() + + def test_results_with_range_and_page_size(self): + tag = "raven" + id_ = "users/1" + base_line = RavenTestHelper.utc_this_month() + + with self.store.open_session() as session: + session.store(User(), id_) + tsf = session.time_series_for(id_, tag) + for i in range(16): + tsf.append_single(base_line + timedelta(minutes=i), i, "watches/apple") + + session.save_changes() + + ranges_list = [ + TimeSeriesRange("raven", base_line + timedelta(minutes=0), base_line + 
timedelta(minutes=3)), + TimeSeriesRange("raven", base_line + timedelta(minutes=4), base_line + timedelta(minutes=7)), + TimeSeriesRange("raven", base_line + timedelta(minutes=8), base_line + timedelta(minutes=11)), + ] + + re = self.store.get_request_executor() + + ts_command = GetMultipleTimeSeriesOperation.GetMultipleTimeSeriesCommand(id_, ranges_list, 0, 0) + re.execute_command(ts_command) + + res = ts_command.result + self.assertEqual(0, len(res.values)) + + ts_command = GetMultipleTimeSeriesOperation.GetMultipleTimeSeriesCommand(id_, ranges_list, 0, 30) + re.execute_command(ts_command) + + res = ts_command.result + self.assertEqual(1, len(res.values)) + self.assertEqual(3, len(res.values.get("raven"))) + + self.assertEqual(4, len(res.values.get("raven")[0].entries)) + self.assertEqual(4, len(res.values.get("raven")[1].entries)) + self.assertEqual(4, len(res.values.get("raven")[2].entries)) + + ts_command = GetMultipleTimeSeriesOperation.GetMultipleTimeSeriesCommand(id_, ranges_list, 0, 6) + re.execute_command(ts_command) + + res = ts_command.result + + self.assertEqual(1, len(res.values)) + self.assertEqual(2, len(res.values.get("raven"))) + + self.assertEqual(4, len(res.values.get("raven")[0].entries)) + self.assertEqual(2, len(res.values.get("raven")[1].entries)) + + def test_client_cache_with_start(self): + base_line = RavenTestHelper.utc_this_month() + + with self.store.open_session() as session: + session.store(User(), "users/1-A") + tsf = session.time_series_for("users/1-A", "Heartrate") + for i in range(20): + tsf.append_single(base_line + timedelta(minutes=i), i, "watches/apple") + session.save_changes() + + with self.store.open_session() as session: + user = session.load("users/1-A", User) + ts = session.time_series_for_entity(user, "Heartrate") + + self.assertEqual(1, session.advanced.number_of_requests) + + res = ts.get(start=20) + self.assertEqual(0, len(res)) + self.assertEqual(2, session.advanced.number_of_requests) + + res = ts.get(start=10) + 
self.assertEqual(10, len(res)) + self.assertEqual(base_line + timedelta(minutes=10), res[0].timestamp) + self.assertEqual(3, session.advanced.number_of_requests) + + res = ts.get(start=0) + self.assertEqual(20, len(res)) + self.assertEqual(base_line + timedelta(minutes=10), res[10].timestamp) + self.assertEqual(4, session.advanced.number_of_requests) + + res = ts.get(start=10) + self.assertEqual(10, len(res)) + self.assertEqual(base_line + timedelta(minutes=10), res[0].timestamp) + self.assertEqual(4, session.advanced.number_of_requests) + + res = ts.get(start=20) + self.assertEqual(0, len(res)) + + def test_get_results_with_range(self): + base_line = RavenTestHelper.utc_this_month() + id_ = "users/1-A" + with self.store.open_session() as session: + session.store(User(), id_) + tsf = session.time_series_for(id_, "raven") + for i in range(8): + tsf.append_single(base_line + timedelta(minutes=i), 64, "watches/apple") + + session.save_changes() + + ranges_list = [ + TimeSeriesRange("raven", base_line + timedelta(minutes=0), base_line + timedelta(minutes=3)), + TimeSeriesRange("raven", base_line + timedelta(minutes=4), base_line + timedelta(minutes=7)), + TimeSeriesRange("raven", base_line + timedelta(minutes=8), base_line + timedelta(minutes=11)), + ] + + re = self.store.get_request_executor() + ts_command = GetMultipleTimeSeriesOperation.GetMultipleTimeSeriesCommand(id_, ranges_list, 0, 10) + re.execute_command(ts_command) + + res = ts_command.result + + self.assertEqual(1, len(res.values)) + self.assertEqual(3, len(res.values.get("raven"))) + + self.assertEqual(4, len(res.values.get("raven")[0].entries)) + self.assertEqual(4, len(res.values.get("raven")[1].entries)) + self.assertEqual(0, len(res.values.get("raven")[2].entries)) + + tsf = session.time_series_for(id_, "raven") + for i in range(8, 11): + tsf.append_single(base_line + timedelta(minutes=i), 1000, "watches/apple") + + session.save_changes() + + ts_command = 
GetMultipleTimeSeriesOperation.GetMultipleTimeSeriesCommand(id_, ranges_list, 0, 10) + + re.execute_command(ts_command) + + res = ts_command.result + + self.assertEqual(1, len(res.values)) + self.assertEqual(3, len(res.values.get("raven"))) + + self.assertEqual(4, len(res.values.get("raven")[0].entries)) + self.assertEqual(4, len(res.values.get("raven")[1].entries)) + self.assertEqual(2, len(res.values.get("raven")[2].entries)) + + def test_results_with_range_and_start(self): + tag = "raven" + id_ = "users/1" + base_line = RavenTestHelper.utc_this_month() + + with self.store.open_session() as session: + session.store(User(), id_) + tsf = session.time_series_for(id_, tag) + for i in range(16): + tsf.append_single(base_line + timedelta(minutes=i), i, "watches/apple") + + session.save_changes() + + ranges_list = [ + TimeSeriesRange("raven", base_line + timedelta(minutes=0), base_line + timedelta(minutes=3)), + TimeSeriesRange("raven", base_line + timedelta(minutes=4), base_line + timedelta(minutes=7)), + TimeSeriesRange("raven", base_line + timedelta(minutes=8), base_line + timedelta(minutes=11)), + ] + + re = self.store.get_request_executor() + + ts_command = GetMultipleTimeSeriesOperation.GetMultipleTimeSeriesCommand(id_, ranges_list, 0, 20) + re.execute_command(ts_command) + + res = ts_command.result + self.assertEqual(1, len(res.values)) + self.assertEqual(3, len(res.values.get("raven"))) + + self.assertEqual(4, len(res.values.get("raven")[0].entries)) + self.assertEqual(4, len(res.values.get("raven")[1].entries)) + self.assertEqual(4, len(res.values.get("raven")[2].entries)) + + ts_command = GetMultipleTimeSeriesOperation.GetMultipleTimeSeriesCommand(id_, ranges_list, 3, 20) + re.execute_command(ts_command) + + res = ts_command.result + + self.assertEqual(1, len(res.values)) + self.assertEqual(3, len(res.values.get("raven"))) + + self.assertEqual(1, len(res.values.get("raven")[0].entries)) + self.assertEqual(4, len(res.values.get("raven")[1].entries)) + 
self.assertEqual(4, len(res.values.get("raven")[2].entries)) + + ts_command = GetMultipleTimeSeriesOperation.GetMultipleTimeSeriesCommand(id_, ranges_list, 9, 20) + re.execute_command(ts_command) + + res = ts_command.result + + self.assertEqual(1, len(res.values)) + self.assertEqual(3, len(res.values.get("raven"))) + + self.assertEqual(0, len(res.values.get("raven")[0].entries)) + self.assertEqual(0, len(res.values.get("raven")[1].entries)) + self.assertEqual(3, len(res.values.get("raven")[2].entries)) + + def test_ranges(self): + base_line = RavenTestHelper.utc_this_month() + id_ = "users/1-A" + + with self.store.open_session() as session: + session.store(User(), id_) + tsf = session.time_series_for(id_, "raven") + for i in range(11): + tsf.append_single(base_line + timedelta(minutes=i), i, "watches/apple") + for i in range(12, 14): + tsf.append_single(base_line + timedelta(minutes=i), i, "watches/apple") + for i in range(16, 21): + tsf.append_single(base_line + timedelta(minutes=i), i, "watches/apple") + + session.save_changes() + + ranges_list = [] + time_series_range = TimeSeriesRange( + "raven", base_line + timedelta(minutes=1), base_line + timedelta(milliseconds=7) + ) + ranges_list.append(time_series_range) + + re = self.store.get_request_executor() + ts_command = GetMultipleTimeSeriesOperation.GetMultipleTimeSeriesCommand(id_, ranges_list) + re.execute_command(ts_command) + res = ts_command.result + + self.assertEqual(1, len(res.values)) + self.assertEqual(1, len(res.values.get("raven"))) + + ranges_list = [ + TimeSeriesRange("raven", base_line + timedelta(minutes=8), base_line + timedelta(minutes=11)) + ] + ts_command = GetMultipleTimeSeriesOperation.GetMultipleTimeSeriesCommand(id_, ranges_list) + re.execute_command(ts_command) + res = ts_command.result + + self.assertEqual(1, len(res.values)) + self.assertEqual(1, len(res.values.get("raven"))) + + ranges_list = [ + TimeSeriesRange("raven", base_line + timedelta(minutes=8), base_line + 
timedelta(minutes=17)) + ] + ts_command = GetMultipleTimeSeriesOperation.GetMultipleTimeSeriesCommand(id_, ranges_list) + re.execute_command(ts_command) + res = ts_command.result + + self.assertEqual(1, len(res.values)) + self.assertEqual(1, len(res.values.get("raven"))) + + ranges_list = [ + TimeSeriesRange("raven", base_line + timedelta(minutes=14), base_line + timedelta(minutes=15)) + ] + ts_command = GetMultipleTimeSeriesOperation.GetMultipleTimeSeriesCommand(id_, ranges_list) + re.execute_command(ts_command) + res = ts_command.result + + self.assertEqual(1, len(res.values)) + self.assertEqual(1, len(res.values.get("raven"))) + + ranges_list = [ + TimeSeriesRange("raven", base_line + timedelta(minutes=23), base_line + timedelta(minutes=25)) + ] + ts_command = GetMultipleTimeSeriesOperation.GetMultipleTimeSeriesCommand(id_, ranges_list) + re.execute_command(ts_command) + res = ts_command.result + + self.assertEqual(1, len(res.values)) + self.assertEqual(1, len(res.values.get("raven"))) + + ranges_list = [ + TimeSeriesRange("raven", base_line + timedelta(minutes=20), base_line + timedelta(minutes=26)) + ] + ts_command = GetMultipleTimeSeriesOperation.GetMultipleTimeSeriesCommand(id_, ranges_list) + re.execute_command(ts_command) + res = ts_command.result + + self.assertEqual(1, len(res.values)) + self.assertEqual(1, len(res.values.get("raven"))) + + def test_client_cache_with_page_size(self): + base_line = RavenTestHelper.utc_this_month() + + with self.store.open_session() as session: + session.store(User(), "users/1-A") + tsf = session.time_series_for("users/1-A", "Heartrate") + for i in range(21): + tsf.append_single(base_line + timedelta(minutes=i), i, "watches/apple") + session.save_changes() + + with self.store.open_session() as session: + user = session.load("users/1-A", User) + ts = session.time_series_for_entity(user, "Heartrate") + res = ts.get(None, None, 0, 0) + self.assertEqual(0, len(res)) + + self.assertEqual(1, session.advanced.number_of_requests) 
+ + res = ts.get(start=0, page_size=10) + self.assertEqual(10, len(res)) + + self.assertEqual(2, session.advanced.number_of_requests) + + res = ts.get(start=0, page_size=7) + self.assertEqual(7, len(res)) + + self.assertEqual(2, session.advanced.number_of_requests) + + res = ts.get(start=0, page_size=20) + self.assertEqual(20, len(res)) + + self.assertEqual(3, session.advanced.number_of_requests) + + res = ts.get(start=0, page_size=25) + self.assertEqual(21, len(res)) + + self.assertEqual(3, session.advanced.number_of_requests) diff --git a/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_15792.py b/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_15792.py new file mode 100644 index 00000000..d3f3ff4b --- /dev/null +++ b/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_15792.py @@ -0,0 +1,34 @@ +from ravendb.documents.queries.time_series import TimeSeriesRawResult +from ravendb.infrastructure.entities import User +from ravendb.tests.test_base import TestBase +from ravendb.tools.raven_test_helper import RavenTestHelper + + +class TestRavenDB15792(TestBase): + def setUp(self): + super().setUp() + + def test_can_query_time_series_with_spaces_in_name(self): + document_id = "users/ayende" + base_line = RavenTestHelper.utc_this_month() + + with self.store.open_session() as session: + session.store(User(), document_id) + + tsf = session.time_series_for(document_id, "gas m3 usage") + tsf.append_single(base_line, 1) + + session.save_changes() + + with self.store.open_session() as session: + query = session.advanced.raw_query( + "declare timeseries out()\n" "{\n" ' from "gas m3 usage"\n' "}\n" "from Users as u\n" "select out()", + TimeSeriesRawResult, + ) + + result = query.first() + self.assertIsNotNone(result) + + results = result.results + + self.assertEqual(1, len(results)) diff --git a/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_16906.py b/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_16906.py new file mode 
100644 index 00000000..2d52870f --- /dev/null +++ b/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_16906.py @@ -0,0 +1,14 @@ +from ravendb.infrastructure.entities import User +from ravendb.tests.test_base import TestBase + + +class TestRavenDB16906(TestBase): + def setUp(self): + super().setUp() + + def test_time_series_for_should_throw_better_error_on_null_entity(self): + with self.store.open_session() as session: + user = session.load("users/1", User) + self.assertRaisesWithMessage( + session.time_series_for_entity, ValueError, "Entity cannot be None", user, "heartRate" + ) diff --git a/ravendb/tests/operations_tests/test_time_series_operation.py b/ravendb/tests/operations_tests/test_time_series_operation.py index c4fcbf84..4e07a9db 100644 --- a/ravendb/tests/operations_tests/test_time_series_operation.py +++ b/ravendb/tests/operations_tests/test_time_series_operation.py @@ -9,6 +9,7 @@ import unittest from ravendb.tests.test_base import TestBase, User +from ravendb.tools.raven_test_helper import RavenTestHelper class TestTimeSeriesOperations(TestBase): @@ -83,6 +84,23 @@ def test_get_time_series_without_range(self): ts_range_result = self.store.operations.send(GetTimeSeriesOperation("users/1-A", self.ts_name)) self.assertEqual(len(ts_range_result.entries), 3) + def test_appending_second_time_series_value_at_the_same_timestamp_will_replace_the_previous_value(self): + base_line = RavenTestHelper.utc_this_month() + with self.store.open_session() as session: + session.store(User("Gracjan"), "users/gracjan") + tsf = session.time_series_for("users/gracjan", "stonks") + tsf.append_single(base_line, 1) + tsf.append_single(base_line, 2) + tsf.append_single(base_line, 3) + session.save_changes() + + with self.store.open_session() as session: + gracjan = session.load("users/gracjan", User) + ts = session.time_series_for_entity(gracjan, "stonks").get() + self.assertEqual(1, len(ts)) + self.assertEqual(1, len(ts[0].values)) + self.assertEqual(3, ts[0].values[0]) 
+ if __name__ == "__main__": unittest.main() diff --git a/ravendb/tests/session_tests/test_advanced.py b/ravendb/tests/session_tests/test_advanced.py index 5516574e..d0970816 100644 --- a/ravendb/tests/session_tests/test_advanced.py +++ b/ravendb/tests/session_tests/test_advanced.py @@ -27,8 +27,8 @@ def test_get_document_id_after_save(self): user = User("U", 1) s.store(user, "test/") s.save_changes() - id = s.advanced.get_document_id(user) - self.assertFalse(id.endswith("/")) + id_ = s.advanced.get_document_id(user) + self.assertFalse(id_.endswith("/")) @unittest.skip("Query streaming") def test_stream_query(self): diff --git a/ravendb/tools/raven_test_helper.py b/ravendb/tools/raven_test_helper.py index e84d5165..320bf0be 100644 --- a/ravendb/tools/raven_test_helper.py +++ b/ravendb/tools/raven_test_helper.py @@ -11,6 +11,11 @@ def utc_today() -> datetime: today = datetime.today() return datetime(today.year, today.month, today.day, 0, 0, 0, 0) + @staticmethod + def utc_this_month() -> datetime: + today = datetime.today() + return datetime(today.year, today.month, 1, 0, 0, 0, 0) + @staticmethod def assert_no_index_errors(store: DocumentStore, database_name: Optional[str] = None) -> None: errors = store.maintenance.for_database(database_name).send(GetIndexErrorsOperation())