diff --git a/ravendb/documents/bulk_insert_operation.py b/ravendb/documents/bulk_insert_operation.py index 99ea3ba9..1e90360a 100644 --- a/ravendb/documents/bulk_insert_operation.py +++ b/ravendb/documents/bulk_insert_operation.py @@ -10,7 +10,7 @@ from typing import Optional, TYPE_CHECKING import requests -from ravendb import constants +from ravendb.primitives import constants from ravendb.exceptions.raven_exceptions import RavenException from ravendb.http.server_node import ServerNode from ravendb.http.raven_command import RavenCommand diff --git a/ravendb/documents/commands/batches.py b/ravendb/documents/commands/batches.py index 9e4eb3eb..1f3b754d 100644 --- a/ravendb/documents/commands/batches.py +++ b/ravendb/documents/commands/batches.py @@ -563,7 +563,7 @@ def __init__( source_name: str, destination_document_id: str, destination_name: str, - change_vector: str, + change_vector: Optional[str], ): if source_document_id.isspace(): raise ValueError("source_document_id is required") @@ -595,7 +595,7 @@ def __init__( name: str, destination_id: str, destination_name: str, - change_vector: str, + change_vector: Optional[str], ): if key.isspace(): raise ValueError("source_document_id is required") @@ -621,7 +621,7 @@ def serialize(self, conventions: DocumentConventions) -> dict: class DeleteAttachmentCommandData(CommandData): - def __init__(self, document_id: str, name: str, change_vector: str): + def __init__(self, document_id: str, name: str, change_vector: Optional[str]): if not document_id: raise ValueError(document_id) if not name: diff --git a/ravendb/documents/commands/crud.py b/ravendb/documents/commands/crud.py index edd55d5f..c474c3d2 100644 --- a/ravendb/documents/commands/crud.py +++ b/ravendb/documents/commands/crud.py @@ -3,10 +3,15 @@ import http import json from typing import Optional, List -from typing import TYPE_CHECKING import requests -from ravendb import constants +from ravendb.documents.session.time_series import ( + TimeSeriesRange, + TimeSeriesTimeRange, + TimeSeriesCountRange, + AbstractTimeSeriesRange, +) +from ravendb.primitives import constants from ravendb.documents.commands.results import GetDocumentsResult from ravendb.documents.queries.utils import HashCalculator from ravendb.http.misc import ResponseDisposeHandling @@ -18,9 +23,6 @@ from ravendb.http.server_node import ServerNode from ravendb.tools.utils import Utils -if TYPE_CHECKING: - from ravendb.documents.conventions import DocumentConventions - class PutResult: def __init__(self, key: Optional[str] = None, change_vector: Optional[str] = None): @@ -76,7 +78,7 @@ def create_request(self, node: ServerNode) -> requests.Request: class HeadDocumentCommand(RavenCommand[str]): - def __init__(self, key: str, change_vector: str): + def __init__(self, key: str, change_vector: Optional[str]): super(HeadDocumentCommand, self).__init__(str) if key is None: raise ValueError("Key cannot be None") @@ -124,7 +126,7 @@ def __init__(self): self._key: Optional[str] = None self._counters: Optional[List[str]] = None self._include_all_counters: Optional[bool] = None - self._time_series_includes: Optional[List] = None # todo: AbstractTimeSeriesRange + self._time_series_includes: Optional[List[AbstractTimeSeriesRange]] = None self._compare_exchange_value_includes: Optional[List[str]] = None @@ -248,19 +250,38 @@ def create_request(self, node: ServerNode) -> requests.Request: for include in self._includes: path_builder.append(f"&include={include}") - # todo: counters if self._include_all_counters: 
path_builder.append(f"&counter={constants.Counters.ALL}") elif self._counters: for counter in self._counters: path_builder.append(f"&counter={counter}") - # todo: time series if self._time_series_includes is not None: - for time_series in self._time_series_includes: - path_builder.append( - f"&timeseries={time_series.name}&from={time_series.from_date}&to={time_series.to_date}" - ) + for ts_include in self._time_series_includes: + if isinstance(ts_include, TimeSeriesRange): + range_: TimeSeriesRange = ts_include + path_builder.append( + f"&timeseries={range_.name}" + f"&from={Utils.datetime_to_string(range_.from_date) if range_.from_date else ''}" + f"&to={Utils.datetime_to_string(range_.to_date) if range_.to_date else ''}" + ) + elif isinstance(ts_include, TimeSeriesTimeRange): + time_range: TimeSeriesTimeRange = ts_include + path_builder.append( + f"&timeseriestime={time_range.name}" + f"&timeType={time_range.type.value}" + f"&timeValue={time_range.time.value}" + f"&timeUnit={time_range.time.unit.value}" + ) + elif isinstance(ts_include, TimeSeriesCountRange): + count_range: TimeSeriesCountRange = ts_include + path_builder.append( + f"&timeseriescount={count_range.name}" + f"&countType={count_range.type.value}" + f"&countValue={count_range.count}" + ) + else: + raise TypeError(f"Unexpected TimeSeries range {ts_include.__class__.__name__}")
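For orientation, a sketch of what each include type above contributes to the request URL. The series name and dates are invented, and the exact timestamp text is whatever Utils.datetime_to_string emits:

    from datetime import datetime
    from ravendb.documents.session.time_series import TimeSeriesRange

    # -> &timeseries=HeartRate&from=<formatted date>&to=
    # (an open-ended bound serializes as an empty value)
    include = TimeSeriesRange("HeartRate", datetime(2023, 5, 1), None)

    # TimeSeriesTimeRange contributes &timeseriestime=<name>&timeType=...&timeValue=...&timeUnit=...
    # TimeSeriesCountRange contributes &timeseriescount=<name>&countType=...&countValue=...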
if self._compare_exchange_value_includes is not None: for compare_exchange_value in self._compare_exchange_value_includes: @@ -368,7 +389,7 @@ def create_request(self, node: ServerNode) -> requests.Request: class HeadAttachmentCommand(RavenCommand[str]): - def __init__(self, document_id: str, name: str, change_vector: str): + def __init__(self, document_id: str, name: str, change_vector: Optional[str]): super().__init__(str) if document_id.isspace(): diff --git a/ravendb/documents/commands/multi_get.py b/ravendb/documents/commands/multi_get.py index 7d91b1e0..0e5c65cb 100644 --- a/ravendb/documents/commands/multi_get.py +++ b/ravendb/documents/commands/multi_get.py @@ -6,7 +6,7 @@ import requests -from ravendb import constants +from ravendb.primitives import constants from ravendb.http.misc import AggressiveCacheOptions, AggressiveCacheMode from ravendb.http.request_executor import RequestExecutor from ravendb.extensions.http_extensions import HttpExtensions diff --git a/ravendb/documents/conventions.py b/ravendb/documents/conventions.py index fe5e95c9..bbd514c1 100644 --- a/ravendb/documents/conventions.py +++ b/ravendb/documents/conventions.py @@ -11,7 +11,7 @@ from typing import TypeVar from ravendb.json.metadata_as_dictionary import MetadataAsDictionary -from ravendb import constants +from ravendb.primitives import constants from ravendb.documents.operations.configuration import ClientConfiguration, LoadBalanceBehavior, ReadBalanceBehavior from ravendb.documents.indexes.definitions import SortOptions from ravendb.tools.utils import Utils diff --git a/ravendb/documents/indexes/counters.py b/ravendb/documents/indexes/counters.py index a437aa92..99cb4cc6 100644 --- a/ravendb/documents/indexes/counters.py +++ b/ravendb/documents/indexes/counters.py @@ -1,6 +1,6 @@ -from typing import Dict, Set, Optional, Callable, Union +from typing import Dict, Set, Optional, Callable -from ravendb import constants +from ravendb.primitives import constants from ravendb.documents.conventions import DocumentConventions from ravendb.documents.indexes.definitions import ( FieldStorage, diff --git a/ravendb/documents/indexes/index_creation.py b/ravendb/documents/indexes/index_creation.py index 9632fbae..aea06ff4 100644 --- a/ravendb/documents/indexes/index_creation.py +++ b/ravendb/documents/indexes/index_creation.py @@ -2,7 +2,7 @@ from abc import abstractmethod, ABC from typing import Generic, TypeVar, Union, Dict, Set, Callable, Optional, List, Collection -from ravendb import constants +from ravendb.primitives import constants from ravendb.documents.conventions import DocumentConventions from ravendb.documents.indexes.spatial.configuration import SpatialOptionsFactory from ravendb.documents.store.definition import DocumentStore, DocumentStoreBase diff --git a/ravendb/documents/operations/attachments/__init__.py b/ravendb/documents/operations/attachments/__init__.py index 41490065..0327e6ad 100644 --- a/ravendb/documents/operations/attachments/__init__.py +++ b/ravendb/documents/operations/attachments/__init__.py @@ -6,7 +6,7 @@ import requests -from ravendb import constants +from ravendb.primitives import constants from ravendb.data.operation import AttachmentType from ravendb.documents.operations.definitions import IOperation, VoidOperation from ravendb.http.http_cache import HttpCache @@ -152,7 +152,7 @@ def is_read_request(self) -> bool: class GetAttachmentOperation(IOperation): - def __init__(self, document_id: str, name: str, attachment_type: AttachmentType, change_vector: str): + def __init__(self, document_id: str, name: str, attachment_type: AttachmentType, change_vector: Optional[str]): if document_id is None: raise ValueError("Invalid document_id") if name is None: diff --git a/ravendb/documents/operations/batch.py b/ravendb/documents/operations/batch.py index 1fee8f93..ac141204 100644 --- a/ravendb/documents/operations/batch.py +++ b/ravendb/documents/operations/batch.py @@ -1,7 +1,7 @@ from copy import deepcopy from typing import Union, List, Dict, TYPE_CHECKING, Optional -from ravendb import constants +from ravendb.primitives import constants from ravendb.documents.commands.batches import SingleNodeBatchCommand, ClusterWideBatchCommand, CommandType from ravendb.documents.operations.patch import PatchStatus from ravendb.documents.session.event_args import AfterSaveChangesEventArgs diff --git a/ravendb/documents/operations/compare_exchange/compare_exchange.py b/ravendb/documents/operations/compare_exchange/compare_exchange.py index 0eb5c970..487f2cf5 100644 --- a/ravendb/documents/operations/compare_exchange/compare_exchange.py +++ b/ravendb/documents/operations/compare_exchange/compare_exchange.py @@ -3,7 +3,7 @@ from enum import Enum from typing import Union, Optional, Generic, TypeVar, Type -from ravendb import constants +from ravendb.primitives import constants from ravendb.documents.conventions import DocumentConventions from ravendb.documents.commands.batches import PutCompareExchangeCommandData, DeleteCompareExchangeCommandData from ravendb.documents.session.document_session_operations.misc import _update_metadata_modifications diff --git a/ravendb/documents/operations/compare_exchange/compare_exchange_value_result_parser.py b/ravendb/documents/operations/compare_exchange/compare_exchange_value_result_parser.py index 110a20af..da54ff36 100644 --- a/ravendb/documents/operations/compare_exchange/compare_exchange_value_result_parser.py +++ b/ravendb/documents/operations/compare_exchange/compare_exchange_value_result_parser.py @@ -1,7 +1,7 @@ import json from typing import Dict, Type, TypeVar, Optional -from ravendb import constants +from ravendb.primitives import constants from ravendb.documents.conventions import DocumentConventions from 
ravendb.documents.operations.compare_exchange.compare_exchange import CompareExchangeValue from ravendb.json.metadata_as_dictionary import MetadataAsDictionary diff --git a/ravendb/documents/operations/compare_exchange/operations.py b/ravendb/documents/operations/compare_exchange/operations.py index e85a6e2e..daf0dd64 100644 --- a/ravendb/documents/operations/compare_exchange/operations.py +++ b/ravendb/documents/operations/compare_exchange/operations.py @@ -1,10 +1,10 @@ from __future__ import annotations import json -from typing import Union, Optional, Generic, TypeVar, Dict, TYPE_CHECKING, Type, Collection +from typing import Optional, Generic, TypeVar, Dict, TYPE_CHECKING, Type, Collection import requests -from ravendb import constants +from ravendb.primitives import constants from ravendb.documents.conventions import DocumentConventions from ravendb.documents.operations.compare_exchange.compare_exchange import ( CompareExchangeValue, @@ -21,7 +21,7 @@ from ravendb.json.metadata_as_dictionary import MetadataAsDictionary from ravendb.documents.session.entity_to_json import EntityToJson from ravendb.tools.utils import Utils -from ravendb.util.util import RaftIdGenerator, StartingWithOptions +from ravendb.util.util import RaftIdGenerator _T = TypeVar("_T") diff --git a/ravendb/documents/operations/time_series.py b/ravendb/documents/operations/time_series.py index 5d85b113..607f1d7f 100644 --- a/ravendb/documents/operations/time_series.py +++ b/ravendb/documents/operations/time_series.py @@ -1,51 +1,52 @@ from __future__ import annotations -import abc import datetime import json from typing import Dict, Optional, List, Any, TYPE_CHECKING, Callable import requests -from ravendb.constants import int_max +from ravendb.primitives.constants import int_max from ravendb.documents.session.loaders.include import TimeSeriesIncludeBuilder -from ravendb.documents.session.time_series import TimeSeriesEntry +from ravendb.documents.session.time_series import TimeSeriesEntry, AbstractTimeSeriesRange from ravendb.http.http_cache import HttpCache from ravendb.http.server_node import ServerNode from ravendb.http.topology import RaftCommand from ravendb.http.raven_command import RavenCommand, VoidRavenCommand from ravendb.documents.operations.definitions import MaintenanceOperation, IOperation, VoidOperation +from ravendb.primitives.time_series import TimeValue from ravendb.tools.utils import Utils from ravendb.util.util import RaftIdGenerator +from ravendb.documents.conventions import DocumentConventions if TYPE_CHECKING: - from ravendb.documents.conventions import DocumentConventions from ravendb.documents.store.definition import DocumentStore class TimeSeriesPolicy: def __init__( self, - name: str, - aggregation_time: Optional[datetime.timedelta] = None, - retention_time: datetime.timedelta = datetime.timedelta.max, + name: Optional[str] = None, + aggregation_time: Optional[TimeValue] = None, + retention_time: TimeValue = TimeValue.MAX_VALUE(), ): if not name or name.isspace(): raise ValueError("Name cannot be None or empty") - if aggregation_time is not None and aggregation_time < datetime.timedelta(0): + if aggregation_time is not None and aggregation_time.compare_to(TimeValue.ZERO()) <= 0: raise ValueError("Aggregation time must be greater than zero") - if retention_time < datetime.timedelta(0): + if retention_time is not None and retention_time.compare_to(TimeValue.ZERO()) <= 0: raise ValueError("Retention time must be greater than zero") - self.name = name self.retention_time = retention_time 
self.aggregation_time = aggregation_time + self.name = name + @classmethod def from_json(cls, json_dict: Dict[str, Any]) -> TimeSeriesPolicy: return cls( - json_dict["Name"], + json_dict["Name"], # todo: Invalid deserialization Utils.string_to_timedelta(json_dict["AggregationTime"]), Utils.string_to_timedelta(json_dict["RetentionTime"]), ) @@ -55,7 +56,7 @@ def get_time_series_name(self, raw_name: str) -> str: def to_json(self) -> Dict[str, Any]: return { - "Name": self.name, + "Name": self.name, # todo: Invalid serialization "AggregationTime": Utils.timedelta_to_str(self.aggregation_time), "RetentionTime": Utils.timedelta_to_str(self.retention_time), } @@ -66,14 +67,14 @@ class RawTimeSeriesPolicy(TimeSeriesPolicy): @classmethod def DEFAULT_POLICY(cls) -> RawTimeSeriesPolicy: - return cls() + return cls(TimeValue.MAX_VALUE()) - def __init__(self, retention_time: Optional[datetime.timedelta] = datetime.timedelta.max): - if retention_time != datetime.timedelta.max: - if retention_time <= datetime.timedelta(0): - raise ValueError("Retention time must be greater than zero") + def __init__(self, retention_time: TimeValue = TimeValue.MAX_VALUE()): + if retention_time.compare_to(TimeValue.ZERO()) <= 0: + raise ValueError("Retention time must be greater than zero") - super(RawTimeSeriesPolicy, self).__init__(self.POLICY_STRING, None, retention_time) + self.name = self.POLICY_STRING + self.retention_time = retention_time + self.aggregation_time = None # the raw policy has no aggregation; set explicitly since super().__init__ is no longer called (to_json reads it) class TimeSeriesCollectionConfiguration: @@ -340,9 +341,9 @@ def __init__(self, name: Optional[str] = None): def to_json(self) -> Dict[str, Any]: json_dict = {"Name": self.name} if self._appends: - json_dict["Appends"] = self._appends + json_dict["Appends"] = [append_op.to_json() for append_op in self._appends] if self._deletes: - json_dict["Deletes"] = self._deletes + json_dict["Deletes"] = [delete_op.to_json() for delete_op in self._deletes] return json_dict def append(self, append_operation: AppendOperation) -> None: @@ -363,18 +364,6 @@ def delete(self, delete_operation: DeleteOperation) -> None: self._deletes.append(delete_operation) -class AbstractTimeSeriesRange(abc.ABC): - def __init__(self, name: str): - self.name = name - - -class TimeSeriesRange(AbstractTimeSeriesRange): - def __init__(self, name: str, from_date: Optional[datetime.datetime], to_date: Optional[datetime.datetime]): - super().__init__(name) - self.from_date = from_date - self.to_date = to_date - - class TimeSeriesRangeResult: def __init__( self, @@ -529,7 +518,7 @@ class GetMultipleTimeSeriesOperation(IOperation[TimeSeriesDetails]): def __init__( self, doc_id: str, - ranges: List[TimeSeriesRange], + ranges: List[AbstractTimeSeriesRange], start: Optional[int] = 0, page_size: Optional[int] = int_max, includes: Optional[Callable[[TimeSeriesIncludeBuilder], None]] = None, @@ -555,7 +544,7 @@ class GetMultipleTimeSeriesCommand(RavenCommand[TimeSeriesDetails]): def __init__( self, doc_id: str, - ranges: List[TimeSeriesRange], + ranges: List[AbstractTimeSeriesRange], start: Optional[int] = 0, page_size: Optional[int] = int_max, includes: Optional[Callable[[TimeSeriesIncludeBuilder], None]] = None, diff --git a/ravendb/documents/queries/facets/misc.py b/ravendb/documents/queries/facets/misc.py index 6c966f75..561459f7 100644 --- a/ravendb/documents/queries/facets/misc.py +++ b/ravendb/documents/queries/facets/misc.py @@ -3,7 +3,7 @@ import enum from typing import Union, List, Dict -from ravendb import constants +from ravendb.primitives import constants class FacetAggregation(enum.Enum): diff --git 
a/ravendb/documents/queries/index_query.py b/ravendb/documents/queries/index_query.py index c7579b3a..bb835557 100644 --- a/ravendb/documents/queries/index_query.py +++ b/ravendb/documents/queries/index_query.py @@ -2,7 +2,7 @@ import datetime from typing import TypeVar, Generic, Union, Optional, Dict -from ravendb import constants +from ravendb.primitives import constants from ravendb.documents.queries.query import ProjectionBehavior from ravendb.documents.queries.utils import HashCalculator diff --git a/ravendb/documents/queries/more_like_this.py b/ravendb/documents/queries/more_like_this.py index aeff5904..085447e8 100644 --- a/ravendb/documents/queries/more_like_this.py +++ b/ravendb/documents/queries/more_like_this.py @@ -3,7 +3,7 @@ from abc import ABC, abstractmethod from typing import Callable, List, Dict, Generic, TypeVar, Union, TYPE_CHECKING -from ravendb import constants +from ravendb.primitives import constants from ravendb.documents.session.tokens.query_tokens.definitions import MoreLikeThisToken diff --git a/ravendb/documents/queries/spatial.py b/ravendb/documents/queries/spatial.py index 097a6cfe..20279e3e 100644 --- a/ravendb/documents/queries/spatial.py +++ b/ravendb/documents/queries/spatial.py @@ -2,7 +2,7 @@ from abc import ABC, abstractmethod from typing import Callable, Optional -from ravendb import constants +from ravendb.primitives import constants from ravendb.documents.indexes.spatial.configuration import SpatialRelation, SpatialUnits from ravendb.documents.session.tokens.misc import WhereOperator from ravendb.documents.session.tokens.query_tokens.query_token import QueryToken diff --git a/ravendb/documents/queries/utils.py b/ravendb/documents/queries/utils.py index 7a607da0..64e0e5eb 100644 --- a/ravendb/documents/queries/utils.py +++ b/ravendb/documents/queries/utils.py @@ -1,7 +1,7 @@ import hashlib -from typing import Iterable, TYPE_CHECKING, List +from typing import Iterable, TYPE_CHECKING -from ravendb import constants +from ravendb.primitives import constants from ravendb.tools.utils import Utils if TYPE_CHECKING: diff --git a/ravendb/documents/session/document_info.py b/ravendb/documents/session/document_info.py index 5edf206b..bc8de6a1 100644 --- a/ravendb/documents/session/document_info.py +++ b/ravendb/documents/session/document_info.py @@ -1,7 +1,7 @@ from __future__ import annotations from typing import Any, Dict -from ravendb import constants +from ravendb.primitives import constants from ravendb.documents.session.concurrency_check_mode import ConcurrencyCheckMode from ravendb.json.metadata_as_dictionary import MetadataAsDictionary diff --git a/ravendb/documents/session/document_session.py b/ravendb/documents/session/document_session.py index b9232cfd..60c0378c 100644 --- a/ravendb/documents/session/document_session.py +++ b/ravendb/documents/session/document_session.py @@ -2,21 +2,20 @@ import abc import copy -import datetime +from datetime import datetime import http import json import os import time import uuid -from typing import Union, Callable, TYPE_CHECKING, Optional, Dict, List, Type, TypeVar, Tuple, Generic +from typing import Union, Callable, TYPE_CHECKING, Optional, Dict, List, Type, TypeVar, Tuple, Generic, Set -from ravendb import constants -from ravendb.constants import int_max +from ravendb.primitives import constants +from ravendb.primitives.constants import int_max from ravendb.documents.operations.counters import CounterOperation, CounterOperationType, GetCountersOperation from ravendb.documents.operations.time_series import ( 
TimeSeriesOperation, GetTimeSeriesOperation, - TimeSeriesRange, TimeSeriesRangeResult, TimeSeriesDetails, GetMultipleTimeSeriesOperation, @@ -28,6 +27,8 @@ TypedTimeSeriesEntry, TimeSeriesValuesHelper, TypedTimeSeriesRollupEntry, + ITimeSeriesValuesBindable, + TimeSeriesRange, ) from ravendb.documents.time_series import TimeSeriesOperations from ravendb.exceptions import exceptions @@ -68,6 +69,7 @@ from ravendb.documents.session.query import DocumentQuery, RawDocumentQuery from ravendb.json.metadata_as_dictionary import MetadataAsDictionary from ravendb.documents.session.operations.load_operation import LoadOperation +from ravendb.tools.time_series import TSRangeHelper from ravendb.tools.utils import Utils, Stopwatch, CaseInsensitiveDict from ravendb.documents.commands.batches import ( PatchCommandData, @@ -102,6 +104,7 @@ _T = TypeVar("_T") _TIndex = TypeVar("_TIndex", bound=AbstractCommonApiForIndexes) +_T_TS_Values_Bindable = TypeVar("_T_TS_Values_Bindable", bound=ITimeSeriesValuesBindable) class DocumentSession(InMemoryDocumentSessionOperations): @@ -168,7 +171,7 @@ def execute_all_pending_lazy_operations(self) -> ResponseTimeInformation: requests = [] for i in range(len(self._pending_lazy_operations)): # todo: pending lazy operation create request - WIP - req = self._pending_lazy_operations[i].create_request() + req = self._pending_lazy_operations[i].create_arequest() if req is None: self._pending_lazy_operations.pop(i) i -= 1 @@ -177,7 +180,7 @@ def execute_all_pending_lazy_operations(self) -> ResponseTimeInformation: if not requests: return ResponseTimeInformation() - sw = datetime.datetime.now() # todo: replace with perf_counter or Stopwatch + sw = Stopwatch.create_started() response_time_duration = ResponseTimeInformation() while self._execute_lazy_operations_single_step(response_time_duration, requests, sw): time.sleep(0.1) @@ -188,7 +191,7 @@ def execute_all_pending_lazy_operations(self) -> ResponseTimeInformation: if value is not None: value(pending_lazy_operation.result) - elapsed = datetime.datetime.now() - sw + elapsed = sw.elapsed() response_time_duration.total_client_duration = elapsed self._pending_lazy_operations.clear() @@ -281,6 +284,7 @@ def load( ) -> Union[Dict[str, _T], _T]: if key_or_keys is None: return None # todo: return default value of object_type, not always None + if includes is None: load_operation = LoadOperation(self) self.__load_internal_stream( @@ -288,28 +292,21 @@ def load( ) result = load_operation.get_documents(object_type) return result.popitem()[1] if len(result) == 1 else result if result else None + include_builder = IncludeBuilder(self.conventions) includes(include_builder) - # todo: time series - # time_series_includes = ( - # [include_builder.time_series_to_include] if include_builder.time_series_to_include is not None else None - # ) - - time_series_includes = [] - - compare_exchange_values_to_include = ( - include_builder._compare_exchange_values_to_include - if include_builder._compare_exchange_values_to_include is not None - else None + time_series_includes = ( + include_builder.time_series_to_include if include_builder.time_series_to_include is not None else None ) + compare_exchange_values_to_include = include_builder.compare_exchange_values_to_include result = self._load_internal( object_type, [key_or_keys] if isinstance(key_or_keys, str) else key_or_keys, - include_builder._documents_to_include if include_builder._documents_to_include else None, - include_builder._counters_to_include if include_builder._counters_to_include else 
None, - include_builder._is_all_counters, + include_builder.documents_to_include if include_builder.documents_to_include else None, + include_builder.counters_to_include if include_builder.counters_to_include else None, + include_builder.is_all_counters, time_series_includes, compare_exchange_values_to_include, ) @@ -320,7 +317,7 @@ def _load_internal( self, object_type: Type[_T], keys: List[str], - includes: List[str], + includes: Optional[Set[str]], counter_includes: List[str] = None, include_all_counters: bool = False, time_series_includes: List[TimeSeriesRange] = None, @@ -463,29 +460,45 @@ def time_series_for_entity(self, entity: object, name: str = None) -> SessionDoc return SessionDocumentTimeSeries.from_entity(self, entity, name) def typed_time_series_for( - self, object_type: Type[_T], document_id: str, name: Optional[str] = None - ) -> SessionDocumentTypedTimeSeries[_T]: - return SessionDocumentTypedTimeSeries(object_type, self, document_id, name) + self, ts_bindable_object_type: Type[_T_TS_Values_Bindable], document_id: str, name: Optional[str] = None + ) -> SessionDocumentTypedTimeSeries[_T_TS_Values_Bindable]: + ts_name = name or TimeSeriesOperations.get_time_series_name(ts_bindable_object_type, self.conventions) + return SessionDocumentTypedTimeSeries(ts_bindable_object_type, self, document_id, ts_name) def typed_time_series_for_entity( - self, object_type: Type[_T], entity: object, name: Optional[str] = None - ) -> SessionDocumentTypedTimeSeries[_T]: - return SessionDocumentTypedTimeSeries.from_entity_typed(object_type, self, entity, name) + self, ts_bindable_object_type: Type[_T_TS_Values_Bindable], entity: object, name: Optional[str] = None + ) -> SessionDocumentTypedTimeSeries[_T_TS_Values_Bindable]: + ts_name = name or TimeSeriesOperations.get_time_series_name(ts_bindable_object_type, self.conventions) + return SessionDocumentTypedTimeSeries.from_entity_typed(ts_bindable_object_type, self, entity, ts_name) def time_series_rollup_for( - self, object_type: Type[_T], document_id: str, policy: str, raw: Optional[str] = None - ) -> SessionDocumentRollupTypedTimeSeries[_T]: - ts_name = raw or TimeSeriesOperations.get_time_series_name(object_type, self.conventions) + self, + ts_bindable_object_type: Type[_T_TS_Values_Bindable], + document_id: str, + policy: str, + raw: Optional[str] = None, + ) -> SessionDocumentRollupTypedTimeSeries[_T_TS_Values_Bindable]: + ts_name = raw or TimeSeriesOperations.get_time_series_name(ts_bindable_object_type, self.conventions) return SessionDocumentRollupTypedTimeSeries( - object_type, self, document_id, ts_name + TimeSeriesConfiguration.TIME_SERIES_ROLLUP_SEPARATOR + policy + ts_bindable_object_type, + self, + document_id, + ts_name + TimeSeriesConfiguration.TIME_SERIES_ROLLUP_SEPARATOR + policy, ) def time_series_rollup_for_entity( - self, object_type: Type[_T], entity: object, policy: str, raw: Optional[str] = None - ) -> SessionDocumentRollupTypedTimeSeries[_T]: - ts_name = raw or TimeSeriesOperations.get_time_series_name(object_type, self.conventions) + self, + ts_bindable_object_type: Type[_T_TS_Values_Bindable], + entity: object, + policy: str, + raw: Optional[str] = None, + ) -> SessionDocumentRollupTypedTimeSeries[_T_TS_Values_Bindable]: + ts_name = raw or TimeSeriesOperations.get_time_series_name(ts_bindable_object_type, self.conventions) return SessionDocumentRollupTypedTimeSeries.from_entity_rollup_typed( - object_type, self, entity, ts_name + TimeSeriesConfiguration.TIME_SERIES_ROLLUP_SEPARATOR + policy + ts_bindable_object_type, + self, + entity, + ts_name + TimeSeriesConfiguration.TIME_SERIES_ROLLUP_SEPARATOR + policy, )
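The typed accessors above resolve the series name from the bindable type when no name is passed. A usage sketch, assuming an open DocumentStore in 'store'; HeartRateMeasure is an invented ITimeSeriesValuesBindable implementation whose mapping ties value index 0 to its heart_rate attribute:

    from datetime import datetime
    from typing import Dict, Optional, Tuple
    from ravendb.documents.session.time_series import ITimeSeriesValuesBindable

    class HeartRateMeasure(ITimeSeriesValuesBindable):
        def __init__(self, value: float):
            self.heart_rate = value

        def get_time_series_mapping(self) -> Dict[int, Tuple[str, Optional[str]]]:
            return {0: ("heart_rate", None)}  # value index 0 <-> heart_rate, no display name

    with store.open_session() as session:
        ts = session.typed_time_series_for(HeartRateMeasure, "users/1")
        ts.append(datetime.utcnow(), HeartRateMeasure(68.0), tag="watches/fitbit")
        session.save_changes()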
class _EagerSessionOperations: @@ -521,7 +534,7 @@ def execute_all_pending_lazy_operations(self) -> ResponseTimeInformation: if value is not None: value(pending_lazy_operation.result) - elapsed = datetime.datetime.now() - sw.elapsed() + elapsed = sw.elapsed() response_time_duration.total_client_duration = elapsed self.__session._pending_lazy_operations.clear() @@ -531,7 +544,7 @@ class _Advanced: def __init__(self, session: DocumentSession): self._session = session - self.__vals_count = 0 + self.__values_count = 0 self.__custom_count = 0 self.__attachment = None @@ -618,7 +631,7 @@ def get_change_vector_for(self, entity: object) -> str: raise ValueError("Entity cannot be None") return self._session._get_document_info(entity).metadata.get(constants.Documents.Metadata.CHANGE_VECTOR) - def get_last_modified_for(self, entity: object) -> datetime.datetime: + def get_last_modified_for(self, entity: object) -> datetime: if entity is None: raise ValueError("Entity cannot be None") document_info = self._session._get_document_info(entity) @@ -833,17 +846,19 @@ def __try_merge_patches(self, document_key: str, patch_request: PatchRequest) -> self._session._deferred_commands.remove(command) # We'll overwrite the deferredCommandsMap when calling Defer # No need to call deferredCommandsMap.remove((id, CommandType.PATCH, null)); + if not isinstance(command, PatchCommandData): + raise TypeError(f"Command class {command.__class__.__name__} is invalid") old_patch: PatchCommandData = command new_script = old_patch.patch.script + "\n" + patch_request.script - new_vals = dict(old_patch.patch.values) + new_values = dict(old_patch.patch.values) for key, value in patch_request.values.items(): - new_vals[key] = value + new_values[key] = value new_patch_request = PatchRequest() new_patch_request.script = new_script - new_patch_request.values = new_vals + new_patch_request.values = new_values self.defer(PatchCommandData(document_key, None, new_patch_request, None)) return True @@ -854,10 +869,10 @@ def increment(self, key_or_entity: Union[object, str], path: str, value_to_add: key_or_entity = str(metadata.get(constants.Documents.Metadata.ID)) patch_request = PatchRequest() variable = f"this.{path}" - value = f"args.val_{self.__vals_count}" + value = f"args.val_{self.__values_count}" patch_request.script = f"{variable} = {variable} ? 
{variable} + {value} : {value} ;" - patch_request.values = {f"val_{self.__vals_count}": value_to_add} - self.__vals_count += 1 + patch_request.values = {f"val_{self.__values_count}": value_to_add} + self.__values_count += 1 if not self.__try_merge_patches(key_or_entity, patch_request): self.defer(PatchCommandData(key_or_entity, None, patch_request, None)) @@ -867,10 +882,10 @@ def patch(self, key_or_entity: Union[object, str], path: str, value: object) -> key_or_entity = metadata[constants.Documents.Metadata.ID] patch_request = PatchRequest() - patch_request.script = f"this.{path} = args.val_{self.__vals_count};" - patch_request.values = {f"val_{self.__vals_count}": value} + patch_request.script = f"this.{path} = args.val_{self.__values_count};" + patch_request.values = {f"val_{self.__values_count}": value} - self.__vals_count += 1 + self.__values_count += 1 if not self.__try_merge_patches(key_or_entity, patch_request): self.defer(PatchCommandData(key_or_entity, None, patch_request, None)) @@ -913,8 +928,8 @@ def patch_object( def add_or_patch(self, key: str, entity: object, path_to_object: str, value: object) -> None: patch_request = PatchRequest() - patch_request.script = f"this.{path_to_object} = args.val_{self.__vals_count}" - patch_request.values = {f"val_{self.__vals_count}": value} + patch_request.script = f"this.{path_to_object} = args.val_{self.__values_count}" + patch_request.values = {f"val_{self.__values_count}": value} collection_name = self._session._request_executor.conventions.get_collection_name(entity) python_type = self._session._request_executor.conventions.get_python_class_name(type(entity)) @@ -927,7 +942,7 @@ def add_or_patch(self, key: str, entity: object, path_to_object: str, value: obj new_instance = self._session.entity_to_json.convert_entity_to_json(entity, document_info) - self.__vals_count += 1 + self.__values_count += 1 patch_command_data = PatchCommandData(key, None, patch_request) patch_command_data.create_if_missing = new_instance @@ -956,7 +971,7 @@ def add_or_patch_array( new_instance = self._session.entity_to_json.convert_entity_to_json(entity, document_info) - self.__vals_count += 1 + self.__values_count += 1 patch_command_data = PatchCommandData(key, None, patch_request) patch_command_data.create_if_missing = new_instance @@ -964,11 +979,11 @@ def add_or_patch_array( def add_or_increment(self, key: str, entity: object, path_to_object: str, val_to_add: object) -> None: variable = f"this.{path_to_object}" - value = f"args.val_{self.__vals_count}" + value = f"args.val_{self.__values_count}" patch_request = PatchRequest() patch_request.script = f"{variable} = {variable} ? 
{variable} + {value} : {value}" - patch_request.values = {f"val_{self.__vals_count}": val_to_add} + patch_request.values = {f"val_{self.__values_count}": val_to_add} collection_name = self._session._request_executor.conventions.get_collection_name(entity) python_type = self._session._request_executor.conventions.find_python_class_name(entity.__class__) @@ -981,7 +996,7 @@ def add_or_increment(self, key: str, entity: object, path_to_object: str, val_to new_instance = self._session.entity_to_json.convert_entity_to_json(entity, document_info) - self.__vals_count += 1 + self.__values_count += 1 patch_command_data = PatchCommandData(key, None, patch_request) patch_command_data.create_if_missing = new_instance @@ -1044,7 +1059,7 @@ def conditional_load( r = self._session.track_entity_document_info(object_type, document_info) return ConditionalLoadResult.create(r, cmd.result.change_vector) - # todo: stream, query and fors like timeseriesrollupfor, conditional load + # todo: stream, query and fors like time_series_rollup_for, conditional load class _Attachment: def __init__(self, session: DocumentSession): @@ -1444,29 +1459,28 @@ def conventions(self) -> DocumentConventions: def session(self) -> InMemoryDocumentSessionOperations: return self.__session + def document_query_from_index_class( + self, index_class: Type[_TIndex], object_type: Optional[Type[_T]] = None + ) -> DocumentQuery[_T]: + if not issubclass(index_class, AbstractCommonApiForIndexes): + raise TypeError( + f"Incorrect type, {index_class} isn't an index. " + f"It doesn't inherit from {AbstractCommonApiForIndexes.__name__} " + ) + index = index_class() + return self.document_query(object_type, index.index_name, None, index.is_map_reduce) + def document_query( self, - object_type: type, - index_class_or_name: Union[type, str] = None, + object_type: Optional[Type[_T]] = None, + index_name: Optional[str] = None, collection_name: str = None, is_map_reduce: bool = False, - ) -> DocumentQuery: - if isinstance(index_class_or_name, type): - if not issubclass(index_class_or_name, AbstractCommonApiForIndexes): - raise TypeError( - f"Incorrect type, {index_class_or_name} isn't an index. It doesn't inherit from" - f" AbstractCommonApiForIndexes" - ) - # todo: check the code below - index_class_or_name: AbstractCommonApiForIndexes = Utils.initialize_object( - None, index_class_or_name, True - ) - index_class_or_name = index_class_or_name.index_name - index_name_and_collection = self.session._process_query_parameters( - object_type, index_class_or_name, collection_name, self.session.conventions + ) -> DocumentQuery[_T]: + index_name, collection_name = self.session._process_query_parameters( + object_type, index_name, collection_name, self.session.conventions ) - index_name = index_name_and_collection[0] - collection_name = index_name_and_collection[1] + return DocumentQuery(object_type, self, index_name, collection_name, is_map_reduce)
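A sketch of how the two entry points divide the work after this refactor, assuming they are reachable via session.advanced; Users_ByName (an AbstractCommonApiForIndexes subclass) and User are invented:

    with store.open_session() as session:
        # index classes go through the dedicated overload, which reads
        # index_name and is_map_reduce off a throwaway instance
        by_index = session.advanced.document_query_from_index_class(Users_ByName, User)

        # plain index names (or collection queries) use document_query directly
        by_name = session.advanced.document_query(User, index_name="Users/ByName")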
class SessionCountersBase: @@ -1478,7 +1492,7 @@ def __init__(self, session: InMemoryDocumentSessionOperations, document_id_or_en self.doc_id = document_id_or_entity self.session = session else: - document = session._documents_by_entity.get(document_id_or_entity) + document = session.documents_by_entity.get(document_id_or_entity) if document is None: self._throw_entity_not_in_session(document_id_or_entity) return @@ -1490,14 +1504,16 @@ def increment(self, counter: str, delta: int = 1) -> None: raise ValueError("Counter name cannot be empty") counter_op = CounterOperation(counter, CounterOperationType.INCREMENT, delta) - document_info = self.session._documents_by_id.get_value(self.doc_id) - if document_info is not None and document_info.entity in self.session._deleted_entities: + document_info = self.session.documents_by_id.get_value(self.doc_id) + if document_info is not None and document_info.entity in self.session.deleted_entities: self._throw_document_already_deleted_in_session(self.doc_id, counter) - command = self.session._deferred_commands_map.get( + command = self.session.deferred_commands_map.get( IdTypeAndName.create(self.doc_id, CommandType.COUNTERS, None), None ) if command is not None: + if not isinstance(command, CountersBatchCommandData): + raise TypeError(f"Command class {command.__class__.__name__} is invalid") counters_batch_command_data: CountersBatchCommandData = command if counters_batch_command_data.has_delete(counter): self._throw_increment_counter_after_delete_attempt(self.doc_id, counter) @@ -1510,16 +1526,16 @@ def delete(self, counter: str) -> None: if not counter or counter.isspace(): raise ValueError("Counter name is required") - if IdTypeAndName.create(self.doc_id, CommandType.DELETE, None) in self.session._deferred_commands_map: + if IdTypeAndName.create(self.doc_id, CommandType.DELETE, None) in self.session.deferred_commands_map: return # no-op - document_info = self.session._documents_by_id.get_value(self.doc_id) - if document_info is not None and document_info.entity in self.session._deleted_entities: + document_info = self.session.documents_by_id.get_value(self.doc_id) + if document_info is not None and document_info.entity in self.session.deleted_entities: return # no-op counter_op = CounterOperation(counter, CounterOperationType.DELETE) - command = self.session._deferred_commands_map.get( + command = self.session.deferred_commands_map.get( IdTypeAndName.create(self.doc_id, CommandType.COUNTERS, None), None ) if command is not None:
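A usage sketch of the deferred flow above; "users/1" and the counter names are invented, and counters_for is assumed to be the session-level accessor that constructs this class:

    with store.open_session() as session:
        counters = session.counters_for("users/1")
        counters.increment("likes")      # default delta = 1
        counters.increment("views", 10)  # merged into the same CountersBatchCommandData
        counters.delete("dislikes")
        session.save_changes()           # one counters batch is sent for users/1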
@@ -1572,7 +1588,7 @@ def get_all(self) -> Dict[str, int]: missing_counters = not cache[0] - document = self.session._documents_by_id.get_value(self.doc_id) + document = self.session.documents_by_id.get_value(self.doc_id) if document is not None: metadata_counters: Dict = document.metadata.get(constants.Documents.Metadata.COUNTERS, None) if metadata_counters is None: @@ -1616,7 +1632,7 @@ def get(self, counter) -> int: else: cache = [False, CaseInsensitiveDict()] - document = self.session._documents_by_id.get_value(self.doc_id) + document = self.session.documents_by_id.get_value(self.doc_id) metadata_has_counter_name = False if document is not None: metadata_counters = document.metadata.get(constants.Documents.Metadata.COUNTERS, None) @@ -1650,7 +1666,7 @@ def get_many(self, counters: List[str]) -> Dict[str, int]: cache = [False, CaseInsensitiveDict()] metadata_counters = None - document = self.session._documents_by_id.get_value(self.doc_id) + document = self.session.documents_by_id.get_value(self.doc_id) if document is not None: metadata_counters = document.metadata.get(constants.Documents.Metadata.COUNTERS, None) @@ -1716,7 +1732,7 @@ def from_entity( if entity is None: raise ValueError("Entity cannot be None") - document_info = session._documents_by_entity.get(entity) + document_info = session.documents_by_entity.get(entity) if document_info is None: cls._throw_entity_not_in_session() @@ -1729,56 +1745,57 @@ def from_entity( @staticmethod def _throw_entity_not_in_session() -> None: raise ValueError( - "Entity is not associated with the session, cannot perform timeseries operations to it. " + "Entity is not associated with the session, cannot perform time series operations on it. " "Use document id instead or track the entity in the session" ) @staticmethod def _throw_document_already_deleted_in_session(document_id: str, time_series: str) -> None: raise ValueError( - f"Can't modify timeseries {time_series} of document {document_id}" + f"Can't modify time series {time_series} of document {document_id}" f", the document was already deleted in this session." ) - def append_single(self, timestamp: datetime.datetime, value: float, tag: Optional[str] = None) -> None: + def append_single(self, timestamp: datetime, value: float, tag: Optional[str] = None) -> None: self.append(timestamp, [value], tag) - def append(self, timestamp: datetime.datetime, values: List[float], tag: Optional[str] = None) -> None: - document_info = self.session._documents_by_id.get_value(self.doc_id) - if document_info is not None and document_info.entity in self.session._deleted_entities: + def append(self, timestamp: datetime, values: List[float], tag: Optional[str] = None) -> None: + if not isinstance(values, List): + raise TypeError( + "The 'values' arg must be a list. If you want to append a single float, use 'append_single(..)' instead."
+ ) + document_info = self.session.documents_by_id.get_value(self.doc_id) + if document_info is not None and document_info.entity in self.session.deleted_entities: self._throw_document_already_deleted_in_session(self.doc_id, self.name) op = TimeSeriesOperation.AppendOperation(timestamp, values, tag) - command = self.session._deferred_commands_map.get( + command = self.session.deferred_commands_map.get( IdTypeAndName.create(self.doc_id, CommandType.TIME_SERIES, self.name) ) if command is not None: ts_cmd: TimeSeriesBatchCommandData = command ts_cmd.time_series.append(op) else: - appends = [] - appends.append(op) + appends = [op] self.session.defer(TimeSeriesBatchCommandData(self.doc_id, self.name, appends, None)) def delete_all(self) -> None: self.delete(None, None) - def delete_at(self, at: datetime.datetime) -> None: + def delete_at(self, at: datetime) -> None: self.delete(at, at) - def delete( - self, datetime_from: Optional[datetime.datetime] = None, datetime_to: Optional[datetime.datetime] = None - ): - document_info = self.session._documents_by_id.get_value(self.doc_id) - if document_info is not None and document_info.entity in self.session._deleted_entities: + def delete(self, datetime_from: Optional[datetime] = None, datetime_to: Optional[datetime] = None): + document_info = self.session.documents_by_id.get_value(self.doc_id) + if document_info is not None and document_info.entity in self.session.deleted_entities: self._throw_document_already_deleted_in_session(self.doc_id, self.name) op = TimeSeriesOperation.DeleteOperation(datetime_from, datetime_to) - command = self.session._deferred_commands_map.get( + command = self.session.deferred_commands_map.get( IdTypeAndName.create(self.doc_id, CommandType.TIME_SERIES, self.name), None ) - if command is not None: + if command is not None and isinstance(command, TimeSeriesBatchCommandData): ts_cmd: TimeSeriesBatchCommandData = command ts_cmd.time_series.delete(op) @@ -1788,8 +1805,8 @@ def delete( def get_time_series_and_includes( self, - from_datetime: datetime.datetime, - to_datetime: datetime.datetime, + from_datetime: datetime, + to_datetime: datetime, includes: Optional[Callable[[TimeSeriesIncludeBuilder], None]], start: int, page_size: int, @@ -1797,14 +1814,14 @@ def get_time_series_and_includes( if page_size == 0: return [] - document = self.session._documents_by_id.get_value(self.doc_id) + document = self.session.documents_by_id.get_value(self.doc_id) if document is not None: metadata_time_series_raw = document.metadata.get(constants.Documents.Metadata.TIME_SERIES) if metadata_time_series_raw is not None and isinstance(metadata_time_series_raw, list): time_series = metadata_time_series_raw if not any(ts.lower() == self.name.lower() for ts in time_series): - # the document is loaded in the session, but the metadata says that there is no such timeseries + # the document is loaded in the session, but the metadata says that there is no such time series return [] self.session.increment_requests_count() @@ -1824,7 +1841,7 @@ def get_time_series_and_includes( ranges = cache.get(self.name) if ranges is not None and len(ranges) > 0: # update - index = 0 if ranges[0].from_date > to_datetime else len(ranges) + index = 0 if TSRangeHelper.left(ranges[0].from_date) > TSRangeHelper.right(to_datetime) else len(ranges) ranges.insert(index, range_result) else: item = [range_result]
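TSRangeHelper (ravendb.tools.time_series) is what lets these comparisons tolerate open-ended bounds; its implementation is not part of this diff. A minimal sketch consistent with the call sites, assuming None stands for an unbounded side:

    from datetime import datetime
    from typing import Optional

    class TSRangeHelper:
        @staticmethod
        def left(date: Optional[datetime]) -> datetime:
            # None as a 'from' bound compares as the earliest possible time
            return date if date is not None else datetime.min

        @staticmethod
        def right(date: Optional[datetime]) -> datetime:
            # None as a 'to' bound compares as the latest possible time
            return date if date is not None else datetime.max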
@@ -1845,23 +1862,23 @@ def _handle_includes(self, range_result: TimeSeriesRangeResult) -> None: @staticmethod def _skip_and_trim_range_if_needed( - from_date: datetime.datetime, - to_date: datetime.datetime, + from_date: datetime, + to_date: datetime, from_range: TimeSeriesRangeResult, to_range: TimeSeriesRangeResult, values: List[TimeSeriesEntry], skip: int, trim: int, ) -> List[TimeSeriesEntry]: - if from_range is not None and from_date <= from_range.to_date: + if from_range is not None and TSRangeHelper.left(from_date) <= TSRangeHelper.right(from_range.to_date): # need to skip a part of the first range - if to_range is not None and to_date >= to_range.from_date: + if to_range is not None and TSRangeHelper.left(to_range.from_date) <= TSRangeHelper.right(to_date): # also need to trim a part of the last range return values[skip : len(values) - trim] return values[skip:] - if to_range is not None and to_date >= to_range.from_date: + if to_range is not None and TSRangeHelper.left(to_range.from_date) <= TSRangeHelper.right(to_date): # trim a part of the last range return values[: len(values) - trim] @@ -1869,8 +1886,8 @@ def _skip_and_trim_range_if_needed( def _serve_from_cache( self, - from_date: datetime.datetime, - to_date: datetime.datetime, + from_date: datetime, + to_date: datetime, start: int, page_size: int, includes: Callable[[TimeSeriesIncludeBuilder], None], @@ -1882,8 +1899,8 @@ def _serve_from_cache( # if found, chop just the relevant part from it and return to the user # otherwise, try to find two ranges (from_range, to_range) - # such that 'from_range' is the last occurence for which range.from_date <= from_date - # and to_range is the first occurence for which range.to_date >= to + # such that 'from_range' is the last occurrence for which range.from_date <= from_date + # and to_range is the first occurrence for which range.to_date >= to # At the same time, figure out the missing partial ranges that we need to get from the server from_range_index = -1 @@ -1892,9 +1909,9 @@ def _serve_from_cache( for i in range(len(ranges)): to_range_index += 1 - if ranges[to_range_index].from_date <= from_date: + if TSRangeHelper.left(ranges[to_range_index].from_date) <= TSRangeHelper.left(from_date): if ( - ranges[to_range_index].to_date >= to_date + TSRangeHelper.right(ranges[to_range_index].to_date) >= TSRangeHelper.right(to_date) or len(ranges[to_range_index].entries) - start >= page_size ): # we have the entire range in cache @@ -1915,14 +1932,21 @@ def _serve_from_cache( from_to_use = ( from_date - if (to_range_index == 0 or from_date < ranges[to_range_index - 1].to_date) + if ( + to_range_index == 0 + or TSRangeHelper.left(from_date) < TSRangeHelper.right(ranges[to_range_index - 1].to_date) + ) else ranges[to_range_index - 1].to_date ) - to_to_use = ranges[to_range_index].from_date if ranges[to_range_index].from_date <= to_date else to_date + to_to_use = ( + ranges[to_range_index].from_date + if TSRangeHelper.left(ranges[to_range_index].from_date) <= TSRangeHelper.right(to_date) + else to_date + ) ranges_to_get_from_server.append(TimeSeriesRange(self.name, from_to_use, to_to_use)) - if ranges[to_range_index].to_date >= to_date: + if TSRangeHelper.right(ranges[to_range_index].to_date) >= TSRangeHelper.right(to_date): break if to_range_index == len(ranges) - 1: @@ -1971,14 +1995,14 @@ def _serve_from_cache( return result_to_user - def _register_includes(self, detials: TimeSeriesDetails) -> None: - for range_result in detials.values.get(self.name): + def _register_includes(self, details: TimeSeriesDetails) -> None: + for range_result in details.values.get(self.name): self._handle_includes(range_result) @staticmethod def 
_merge_ranges_with_results( - from_date: datetime.datetime, - to_date: datetime.datetime, + from_date: datetime, + to_date: datetime, ranges: List[TimeSeriesRangeResult], from_range_index: int, to_range_index: int, @@ -1994,7 +2018,11 @@ def _merge_ranges_with_results( for i in range(start, end + 1): if i == from_range_index: - if ranges[i].from_date <= from_date <= ranges[i].to_date: + if ( + TSRangeHelper.left(ranges[i].from_date) + <= TSRangeHelper.left(from_date) + <= TSRangeHelper.right(ranges[i].to_date) + ): # requested range [from, to] starts inside 'fromRange' # i.e from_range.from_date <= from_date <= from_range.to_date # so we might need to skip a part of it when we return the @@ -2003,13 +2031,13 @@ def _merge_ranges_with_results( if ranges[i].entries is not None: for v in ranges[i].entries: merged_values.append(v) - if v.timestamp < from_date: + if v.timestamp < TSRangeHelper.left(from_date): skip += 1 continue - if ( - current_result_index < len(result_from_server) - and result_from_server[current_result_index].from_date < ranges[i].from_date + if current_result_index < len(result_from_server) and ( + TSRangeHelper.left(result_from_server[current_result_index].from_date) + < TSRangeHelper.left(ranges[i].from_date) ): # add current result from server to the merged list # in order to avoid duplication, skip first item in range @@ -2019,20 +2047,20 @@ def _merge_ranges_with_results( merged_values.extend(to_add) if i == to_range_index: - if ranges[i].from_date <= to_date: + if TSRangeHelper.left(ranges[i].from_date) <= TSRangeHelper.right(to_date): # requested range [from_date, to_date] ends inside to_range # so we might need to trim a part of it when we return the # result to the user (i.e. trim [to_date, to_range.to_date] for index in range(0 if len(merged_values) == 0 else 1, len(ranges[i].entries)): merged_values.append(ranges[i].entries[index]) - if ranges[i].entries[index].timestamp > to_date: + if ranges[i].entries[index].timestamp > TSRangeHelper.right(to_date): trim += 1 continue # add current range from cache to the merged list - # in order to avoid duplication, skip frist item in range if needed + # in order to avoid duplication, skip first item in range if needed to_add = ranges[i].entries[0 if len(merged_values) == 0 else 1 :] merged_values.extend(to_add) @@ -2061,8 +2089,8 @@ def _merge_ranges_with_results( @staticmethod def _chop_relevant_range( ts_range: TimeSeriesRangeResult, - from_date: datetime.datetime, - to_date: datetime.datetime, + from_date: datetime, + to_date: datetime, start: int, page_size: int, ) -> List[TimeSeriesEntry]: @@ -2071,10 +2099,10 @@ def _chop_relevant_range( results = [] for value in ts_range.entries: - if value.timestamp > to_date: + if value.timestamp > TSRangeHelper.right(to_date): break - if value.timestamp < from_date: + if value.timestamp < TSRangeHelper.left(from_date): continue start -= 1 @@ -2091,8 +2119,8 @@ def _chop_relevant_range( def _get_from_cache( self, - from_date: datetime.datetime, - to_date: datetime.datetime, + from_date: datetime, + to_date: datetime, includes: Optional[Callable[[TimeSeriesIncludeBuilder], None]], start: int, page_size: int, @@ -2106,7 +2134,7 @@ def _get_from_cache( result_to_user = self._serve_from_cache(from_date, to_date, start, page_size, includes) return result_to_user - def _not_in_cache(self, from_date: datetime.datetime, to_date: datetime.datetime): + def _not_in_cache(self, from_date: datetime, to_date: datetime): cache = self.session.time_series_by_doc_id.get(self.doc_id, None) if cache 
is None: return True @@ -2115,7 +2143,11 @@ def _not_in_cache(self, from_date: datetime.datetime, to_date: datetime.datetime if ranges is None: return True - return not ranges or ranges[0].from_date > to_date or from_date > ranges[-1].to_date + return ( + not ranges + or TSRangeHelper.left(ranges[0].from_date) > TSRangeHelper.right(to_date) + or TSRangeHelper.left(from_date) > TSRangeHelper.right(ranges[-1].to_date) + ) class CachedEntryInfo: def __init__( @@ -2144,7 +2176,7 @@ def from_entity( if entity is None: raise ValueError("Entity cannot be None") - document_info = session._documents_by_entity.get(entity) + document_info = session.documents_by_entity.get(entity) if document_info is None: cls._throw_entity_not_in_session() @@ -2156,8 +2188,8 @@ def from_entity( def get( self, - from_date: Optional[datetime.datetime] = None, - to_date: Optional[datetime.datetime] = None, + from_date: Optional[datetime] = None, + to_date: Optional[datetime] = None, start: int = 0, page_size: int = int_max, ) -> Optional[List[TimeSeriesEntry]]: @@ -2165,8 +2197,8 @@ def get( def get_include( self, - from_date: Optional[datetime.datetime] = None, - to_date: Optional[datetime.datetime] = None, + from_date: Optional[datetime] = None, + to_date: Optional[datetime] = None, includes: Optional[Callable[[TimeSeriesIncludeBuilder], None]] = None, start: int = 0, page_size: int = int_max, @@ -2182,77 +2214,97 @@ def get_include( return results_to_user[:page_size] -class SessionDocumentTypedTimeSeries(SessionTimeSeriesBase, Generic[_T]): - def __init__(self, object_type: Type[_T], session: InMemoryDocumentSessionOperations, document_id: str, name: str): +class SessionDocumentTypedTimeSeries(SessionTimeSeriesBase, Generic[_T_TS_Values_Bindable]): + def __init__( + self, + ts_bindable_object_type: Type[_T_TS_Values_Bindable], + session: InMemoryDocumentSessionOperations, + document_id: str, + name: str, + ): super(SessionDocumentTypedTimeSeries, self).__init__(session, document_id, name) - self._object_type = object_type + self._ts_value_object_type = ts_bindable_object_type @classmethod def from_entity_typed( - cls, object_type: Type[_T], session: InMemoryDocumentSessionOperations, entity: object, name: str + cls, + ts_bindable_object_type: Type[_T_TS_Values_Bindable], + session: InMemoryDocumentSessionOperations, + entity: object, + name: str, ) -> SessionDocumentTypedTimeSeries: if entity is None: raise ValueError("Entity cannot be None") - document_info = session._documents_by_entity.get(entity) + document_info = session.documents_by_entity.get(entity) if document_info is None: cls._throw_entity_not_in_session() - if not name or name.isspace(): - raise ValueError("Name cannot be None or whitespace") - return cls(object_type, session, document_info.key, name) + if not name: + name = TimeSeriesOperations.get_time_series_name(ts_bindable_object_type, session.conventions) + return cls(ts_bindable_object_type, session, document_info.key, name) def get( self, - from_date: Optional[datetime.datetime] = None, - to_date: Optional[datetime.datetime] = None, + from_date: Optional[datetime] = None, + to_date: Optional[datetime] = None, start: int = 0, page_size: int = int_max, - ) -> Optional[List[TypedTimeSeriesEntry[_T]]]: + ) -> Optional[List[TypedTimeSeriesEntry[_T_TS_Values_Bindable]]]: if super()._not_in_cache(from_date, to_date): entries = self.get_time_series_and_includes(from_date, to_date, None, start, page_size) if entries is None: return None - return [x.as_typed_entry(self._object_type) for x in entries] + 
return [x.as_typed_entry(self._ts_value_object_type) for x in entries] results = self._get_from_cache(from_date, to_date, None, start, page_size) - return [x.as_typed_entry(self._object_type) for x in results] + return [x.as_typed_entry(self._ts_value_object_type) for x in results] - def append(self, timestamp: datetime.datetime, entry: _T, tag: Optional[str] = None) -> None: + def append(self, timestamp: datetime, entry: _T_TS_Values_Bindable, tag: Optional[str] = None) -> None: values = TimeSeriesValuesHelper.get_values(type(entry), entry) - self.append(timestamp, values, tag) + super().append(timestamp, values, tag) - def append_entry(self, entry: TypedTimeSeriesEntry[_T]) -> None: + def append_entry(self, entry: TypedTimeSeriesEntry[_T_TS_Values_Bindable]) -> None: self.append_single(entry.timestamp, entry.value, entry.tag) -class SessionDocumentRollupTypedTimeSeries(SessionTimeSeriesBase, Generic[_T]): - def __init__(self, object_type: Type[_T], session: InMemoryDocumentSessionOperations, document_id: str, name: str): +class SessionDocumentRollupTypedTimeSeries(SessionTimeSeriesBase, Generic[_T_TS_Values_Bindable]): + def __init__( + self, + ts_bindable_object_type: Type[_T_TS_Values_Bindable], + session: InMemoryDocumentSessionOperations, + document_id: str, + name: str, + ): super(SessionDocumentRollupTypedTimeSeries, self).__init__(session, document_id, name) - self._object_type = object_type + self._object_type = ts_bindable_object_type @classmethod def from_entity_rollup_typed( - cls, object_type: Type[_T], session: InMemoryDocumentSessionOperations, entity: object, name: str + cls, + ts_bindable_object_type: Type[_T_TS_Values_Bindable], + session: InMemoryDocumentSessionOperations, + entity: object, + name: str, ) -> SessionDocumentRollupTypedTimeSeries: if entity is None: raise ValueError("Entity cannot be None") - document_info = session._documents_by_entity.get(entity) + document_info = session.documents_by_entity.get(entity) if document_info is None: cls._throw_entity_not_in_session() if not name or name.isspace(): raise ValueError("Name cannot be None or whitespace") - return cls(object_type, session, document_info.key, name) + return cls(ts_bindable_object_type, session, document_info.key, name) def get( self, - from_date: Optional[datetime.datetime] = None, - to_date: Optional[datetime.datetime] = None, + from_date: Optional[datetime] = None, + to_date: Optional[datetime] = None, start: int = 0, page_size: int = int_max, ): diff --git a/ravendb/documents/session/document_session_operations/in_memory_document_session_operations.py b/ravendb/documents/session/document_session_operations/in_memory_document_session_operations.py index 634e0054..fd96674d 100644 --- a/ravendb/documents/session/document_session_operations/in_memory_document_session_operations.py +++ b/ravendb/documents/session/document_session_operations/in_memory_document_session_operations.py @@ -15,6 +15,7 @@ ForceRevisionStrategy, DocumentsChanges, ) +from ravendb.tools.time_series import TSRangeHelper try: from collections.abc import MutableSet @@ -23,9 +24,9 @@ import uuid as uuid from copy import deepcopy, Error -from typing import Optional, Union, Callable, List, Dict, Set, Type, TypeVar, Tuple +from typing import Union, Callable, List, Dict, Set, Type, TypeVar, Tuple, Any -from ravendb import constants +from ravendb.primitives import constants from ravendb.documents.commands.crud import GetDocumentsResult from ravendb.documents.conventions import DocumentConventions from ravendb.documents.identity.hilo 
import GenerateEntityIdOnTheClient @@ -50,7 +51,6 @@ from ravendb.extensions.json_extensions import JsonExtensions from ravendb.http.raven_command import RavenCommand from ravendb.json.json_operation import JsonOperation -from ravendb.json.metadata_as_dictionary import MetadataAsDictionary from ravendb.json.result import BatchCommandResult from ravendb.documents.session.entity_to_json import EntityToJson from ravendb.tools.utils import Utils, CaseInsensitiveDict, CaseInsensitiveSet @@ -592,6 +592,22 @@ def before_query_invoke(self, before_query_event_args: BeforeQueryEventArgs): for event in self.__before_query: event(before_query_event_args) + @property + def documents_by_id(self): + return self._documents_by_id + + @property + def deleted_entities(self): + return self._deleted_entities + + @property + def deferred_commands_map(self): + return self._deferred_commands_map + + @property + def documents_by_entity(self): + return self._documents_by_entity + @property def operations(self) -> OperationExecutor: return self._operation_executor @@ -1377,47 +1393,73 @@ def __register_missing_counters_for_keys(self, keys: List[str], counters_to_incl cache[1][counter] = None - def register_time_series(self, result_time_series: dict): + def register_time_series(self, result_time_series: Dict[str, Dict[str, List[Dict[str, Any]]]]): if self.no_tracking or not result_time_series: return - for key, value in result_time_series: - if not value: + for doc_id, time_series in result_time_series.items(): + if not time_series: continue - cache = self._time_series_by_doc_id.get(key, CaseInsensitiveDict()) - for inner_key, inner_value in value: - if not inner_value: + + # compute if absent (cache the result) + session_cache = self._time_series_by_doc_id + cached_value = None + if doc_id in session_cache: + cached_value = session_cache[doc_id] + if cached_value is None: + cached_value = CaseInsensitiveDict() + session_cache[doc_id] = cached_value + + for time_series_name, time_series_ranges in time_series.items(): + if not time_series_ranges: continue - name = inner_key - for range_val in inner_value: - self.__add_to_cache(cache, range_val, name) + name = time_series_name + for time_series_range in time_series_ranges: + time_series_range_result = TimeSeriesRangeResult.from_json(time_series_range) + self.__add_to_cache(cached_value, time_series_range_result, name) @staticmethod def __add_to_cache(cache: Dict[str, List[TimeSeriesRangeResult]], new_range: TimeSeriesRangeResult, name: str): local_ranges = cache.get(name) if not local_ranges: - cache[name] = list([new_range]) + # No local ranges in cache for this series + cache[name] = [new_range] return - if local_ranges[0].from_date > new_range.to_date or local_ranges[-1].to_date < new_range.from_date: - index = 0 if local_ranges[0].from_date > new_range.to_date else len(local_ranges) - local_ranges[index] = new_range + if TSRangeHelper.left(local_ranges[0].from_date) > TSRangeHelper.right( + new_range.to_date + ) or TSRangeHelper.right(local_ranges[-1].to_date) < TSRangeHelper.left(new_range.from_date): + # the entire range [from, to] is out of cache bounds + index = ( + 0 + if TSRangeHelper.left(local_ranges[0].from_date) > TSRangeHelper.right(new_range.to_date) + else len(local_ranges) + ) + local_ranges.insert(index, new_range) return to_range_index = int() from_range_index = -1 range_already_in_cache = False + broke_out_of_loop = False for to_range_index in range(len(local_ranges)): - if local_ranges[to_range_index].from_date <= new_range.from_date: - if 
local_ranges[to_range_index].to_date >= new_range.to_date: + if TSRangeHelper.left(local_ranges[to_range_index].from_date) <= TSRangeHelper.left(new_range.from_date): + if TSRangeHelper.right(local_ranges[to_range_index].to_date) >= TSRangeHelper.right(new_range.to_date): range_already_in_cache = True + broke_out_of_loop = True break + from_range_index = to_range_index continue - if local_ranges[to_range_index].to_date >= new_range.to_date: + + if TSRangeHelper.right(local_ranges[to_range_index].to_date) >= TSRangeHelper.right(new_range.to_date): + broke_out_of_loop = True break + if not broke_out_of_loop: + to_range_index += 1 # in Java, the last increment happens before the condition check + if range_already_in_cache: InMemoryDocumentSessionOperations.__update_existing_range(local_ranges[to_range_index], new_range) return @@ -1462,7 +1504,7 @@ def add_to_cache( cache[time_series] = result return - if ranges[to_range_index].from_date > to_date: + if TSRangeHelper.left(ranges[to_range_index].from_date) > TSRangeHelper.right(to_date): # requested range ends before 'toRange' starts # remove all ranges that come before 'toRange' from cache # add the new range at the beginning of the list @@ -1471,7 +1513,9 @@ def add_to_cache( # and the requested range is : [1,6] # after this action cache will be : [[1,6], [7,10]] - ranges[0:to_range_index].clear() + for i in range(0, to_range_index): + del ranges[0] + time_series_range_result = TimeSeriesRangeResult(from_date, to_date, values) ranges.insert(0, time_series_range_result) return @@ -1486,7 +1530,9 @@ def add_to_cache( ranges[to_range_index].from_date = from_date ranges[to_range_index].entries = values - ranges[0:to_range_index].clear() + + for i in range(0, to_range_index): + del ranges[0] return # found a 'from_range' @@ -1494,7 +1540,7 @@ def add_to_cache( if to_range_index == len(ranges): # didn't find a 'to_range' => all the ranges in cache end before 'to' - if ranges[from_range_index].to_date < from_date: + if TSRangeHelper.left(from_date) > TSRangeHelper.right(ranges[from_range_index].to_date): # requested range starts after 'fromRange' ends, # so it needs to be placed right after it # remove all the ranges that come after 'fromRange' from cache @@ -1505,7 +1551,9 @@ def add_to_cache( # then 'fromRange' is : [2,3] # after this action cache will be : [[2,3], [4,12]] - ranges[from_range_index + 1 : len(ranges)].clear() + for i in range(from_range_index + 1, len(ranges)): + del ranges[from_range_index + 1] + time_series_range_result = TimeSeriesRangeResult(from_date, to_date, values) ranges.append(time_series_range_result) return @@ -1521,16 +1569,18 @@ def add_to_cache( ranges[from_range_index].to_date = to_date ranges[from_range_index].entries = values - ranges[from_range_index + 1 : len(ranges)].clear() + + for i in range(from_range_index + 1, len(ranges)): + del ranges[from_range_index + 1] return # found both 'from_range' and 'to_range' # the requested range is inside cache bounds - if ranges[from_range_index].to_date < from_date: + if TSRangeHelper.left(from_date) > TSRangeHelper.right(ranges[from_range_index].to_date): # requested range starts after 'from_range' ends - if ranges[to_range_index].from_date > to_date: + if TSRangeHelper.left(ranges[to_range_index].from_date) > TSRangeHelper.right(to_date): # requested range ends before 'toRange' starts # remove all ranges in between 'fromRange' and 'toRange' @@ -1541,7 +1591,9 @@ def add_to_cache( # then 'fromRange' is [2,3] and 'toRange' is [10,12] # after this action cache will be : 
[[2,3], [4,9], [10,12]]

-        ranges[from_range_index + 1 : to_range_index].clear()
+            for i in range(from_range_index + 1, to_range_index):
+                del ranges[from_range_index + 1]
+
             time_series_range_result = TimeSeriesRangeResult(from_date, to_date, values)
             ranges.insert(from_range_index + 1, time_series_range_result)
             return
@@ -1556,7 +1608,8 @@ def add_to_cache(
             # then 'fromRange' is [2,3] and 'toRange' is [7,10]
             # after this action cache will be : [[2,3], [4,10]]

-        ranges[from_range_index + 1 : to_range_index].clear()
+            for i in range(from_range_index + 1, to_range_index):
+                del ranges[from_range_index + 1]

             ranges[to_range_index].from_date = from_date
             ranges[to_range_index].entries = values
@@ -1564,7 +1617,7 @@ def add_to_cache(

         # the requested range starts inside 'from_range'

-        if ranges[to_range_index].from_date > to_date:
+        if TSRangeHelper.left(ranges[to_range_index].from_date) > TSRangeHelper.right(to_date):
             # requested range ends before 'toRange' starts

             # remove all ranges in between 'fromRange' and 'toRange'
@@ -1577,8 +1630,8 @@ def add_to_cache(
             ranges[from_range_index].to_date = to_date
             ranges[from_range_index].entries = values

-            ranges[from_range_index + 1 : to_range_index].clear()
-
+            for i in range(from_range_index + 1, to_range_index):
+                del ranges[from_range_index + 1]
             return

         # the requested range starts inside 'fromRange'
@@ -1594,7 +1647,8 @@ def add_to_cache(
         ranges[from_range_index].to_date = ranges[to_range_index].to_date
         ranges[from_range_index].entries = values

-        ranges[from_range_index + 1 : to_range_index + 1].clear()
+        for i in range(from_range_index + 1, to_range_index + 1):
+            del ranges[from_range_index + 1]

     @staticmethod
     def __merge_ranges(
@@ -1604,16 +1658,19 @@ def __merge_ranges(
         new_range: TimeSeriesRangeResult,
     ) -> List[TimeSeriesEntry]:
         merged_values = []
-        if from_range_index != -1 and local_ranges[from_range_index].to_date.time() >= new_range.from_date.time():
+        if from_range_index != -1 and local_ranges[from_range_index].to_date >= new_range.from_date:
             for val in local_ranges[from_range_index].entries:
-                if val.timestamp.time() >= new_range.from_date.time():
+                if val.timestamp >= new_range.from_date:
                     break
                 merged_values.append(val)
+
         merged_values.extend(new_range.entries)

-        if to_range_index < len(local_ranges) and local_ranges[to_range_index].from_date <= new_range.to_date:
+        if to_range_index < len(local_ranges) and TSRangeHelper.left(
+            local_ranges[to_range_index].from_date
+        ) <= TSRangeHelper.right(new_range.to_date):
             for val in local_ranges[to_range_index].entries:
-                if val.timestamp.time() <= new_range.to_date.time():
+                if val.timestamp <= new_range.to_date:
                     continue
                 merged_values.append(val)
         return merged_values
@@ -1622,7 +1679,7 @@ def __merge_ranges(
     def __update_existing_range(local_range: TimeSeriesRangeResult, new_range: TimeSeriesRangeResult) -> None:
         new_values = []
         for index in range(len(local_range.entries)):
-            if local_range.entries[index].timestamp.time() >= new_range.from_date.time():
+            if local_range.entries[index].timestamp >= new_range.from_date:
                 break
             new_values.append(local_range.entries[index])
@@ -1630,7 +1687,7 @@ def __update_existing_range(local_range: TimeSeriesRangeResult, new_range: TimeS
         new_values.extend(new_range.entries)

         for j in range(len(local_range.entries)):
-            if local_range.entries[j].timestamp.time() <= new_range.to_date.time():
+            if local_range.entries[j].timestamp <= new_range.to_date:
                 continue
             new_values.append(local_range.entries[j])
         local_range.entries = new_values
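A toy model of the invariant the rewritten cache code maintains: per series, the session keeps a sorted list of disjoint ranges, and a newly fetched range absorbs every cached range it overlaps. Integer pairs stand in for the real `TimeSeriesRangeResult` objects with dates; the sample values come from the comments in the hunks above.

```python
def add_to_cache(ranges, new_range):
    # keep cached ranges that are disjoint from the new one,
    # fold every overlapping (or touching) range into it, re-sort
    lo, hi = new_range
    surviving = [r for r in ranges if r[1] < lo or r[0] > hi]
    for r_lo, r_hi in ranges:
        if not (r_hi < lo or r_lo > hi):
            lo, hi = min(lo, r_lo), max(hi, r_hi)
    surviving.append((lo, hi))
    return sorted(surviving)


# the scenarios described in the comments above:
assert add_to_cache([(2, 3), (7, 10)], (1, 6)) == [(1, 6), (7, 10)]
assert add_to_cache([(2, 3), (7, 10)], (4, 12)) == [(2, 3), (4, 12)]
assert add_to_cache([(2, 3), (7, 10)], (4, 10)) == [(2, 3), (4, 10)]
```

diff --git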
a/ravendb/documents/session/entity_to_json.py b/ravendb/documents/session/entity_to_json.py index 4c55209c..661b3efd 100644 --- a/ravendb/documents/session/entity_to_json.py +++ b/ravendb/documents/session/entity_to_json.py @@ -2,7 +2,7 @@ from copy import deepcopy from typing import Optional, TYPE_CHECKING, Union, Type, TypeVar, Dict -from ravendb import constants +from ravendb.primitives import constants from ravendb.documents.session.document_info import DocumentInfo from ravendb.documents.session.event_args import ( BeforeConversionToDocumentEventArgs, diff --git a/ravendb/documents/session/loaders/include.py b/ravendb/documents/session/loaders/include.py index 242e0701..3d59493f 100644 --- a/ravendb/documents/session/loaders/include.py +++ b/ravendb/documents/session/loaders/include.py @@ -1,12 +1,19 @@ from __future__ import annotations + import datetime -from typing import Set, Tuple, Dict, Union, Optional, TYPE_CHECKING +from typing import Set, Tuple, Dict, Union, Optional, List + +from ravendb.primitives import constants from ravendb.documents.conventions import DocumentConventions -import ravendb.documents.operations.time_series +from ravendb.primitives.time_series import TimeValue from ravendb.tools.utils import CaseInsensitiveDict, CaseInsensitiveSet -if TYPE_CHECKING: - from ravendb.documents.operations.time_series import TimeSeriesRange +from ravendb.documents.session.time_series import ( + TimeSeriesRange, + TimeSeriesTimeRange, + TimeSeriesRangeType, + TimeSeriesCountRange, +) class IncludeBuilderBase: @@ -16,31 +23,51 @@ def __init__(self, conventions: DocumentConventions): self._documents_to_include: Set[str] = set() self._alias: str = "" self._counters_to_include_by_source_path: Dict[str, Tuple[bool, Set[str]]] = CaseInsensitiveDict() - self._time_series_to_include_by_source_alias: Dict[str, Set["TimeSeriesRange"]] = {} + self._time_series_to_include_by_source_alias: Dict[str, Set[TimeSeriesRange]] = {} self._compare_exchange_values_to_include: Set[str] = set() self._include_time_series_tags: Optional[bool] = None self._include_time_series_document: Optional[bool] = None @property - def _time_series_to_include(self) -> Union[None, Set["TimeSeriesRange"]]: + def time_series_to_include(self) -> Union[None, Set["TimeSeriesRange"]]: if self._time_series_to_include_by_source_alias is None: return None - return self._time_series_to_include_by_source_alias[""] + return self._time_series_to_include_by_source_alias.get("", None) @property - def _counters_to_include(self) -> Union[None, Set[str]]: + def counters_to_include(self) -> Union[None, Set[str]]: if self._counters_to_include_by_source_path is None or len(self._counters_to_include_by_source_path) == 0: return None - value = self._counters_to_include_by_source_path.get("") + value = self._counters_to_include_by_source_path.get("", None) return value[1] if value is not None else {} @property - def _is_all_counters(self) -> bool: + def is_all_counters(self) -> bool: if self._counters_to_include_by_source_path is None: return False - value = self._counters_to_include_by_source_path.get("") + value = self._counters_to_include_by_source_path.get("", None) return value[0] if value is not None else False + @property + def alias(self): + return self._alias + + @property + def counters_to_include_by_source_path(self): + return self._counters_to_include_by_source_path + + @property + def time_series_to_include_by_source_alias(self): + return self._time_series_to_include_by_source_alias + + @property + def documents_to_include(self): + 
return self._documents_to_include + + @property + def compare_exchange_values_to_include(self): + return self._compare_exchange_values_to_include + def _include_compare_exchange_value(self, path: str) -> None: if self._compare_exchange_values_to_include is None: self._compare_exchange_values_to_include = {} @@ -94,20 +121,118 @@ def _with_alias(self): self._alias = f"a_{self._next_parameter_id}" self._next_parameter_id += 1 + def _assert_valid(self, alias: str, name: str) -> None: + if not name or name.isspace(): + raise ValueError("Name cannot be None or whitespace") + + if self.time_series_to_include_by_source_alias is not None: + hash_set_2 = self.time_series_to_include_by_source_alias.get(alias, None) + if hash_set_2: + if constants.TimeSeries.ALL == name: + raise RuntimeError( + "IncludeBuilder : Cannot use 'includeAllTimeSeries' " + "after using 'includeTimeSeries' or 'includeAllTimeSeries'." + ) + + if any([constants.TimeSeries.ALL == x.name for x in hash_set_2]): + raise RuntimeError( + "IncludeBuilder: Cannot use 'includeTimeSeries' or 'includeAllTimeSeries' " + "after using 'includeAllTimeSeries'." + ) + # todo: more time series methods - def _include_time_series(self, alias: str, name: str, from_date: datetime.datetime, to_date: datetime.datetime): - if not name: - raise ValueError("Name cannot be empty") + def _include_time_series_from_to( + self, alias: str, name: str, from_date: datetime.datetime, to_date: datetime.datetime + ): + self._assert_valid(alias, name) - if self._time_series_to_include_by_source_alias is None: + if self.time_series_to_include_by_source_alias is None: + self._time_series_to_include_by_source_alias = {} + + hash_set = self.time_series_to_include_by_source_alias.get(alias, None) + if hash_set is None: + self.time_series_to_include_by_source_alias[alias] = set() # todo: comparer, define other set class + hash_set = self.time_series_to_include_by_source_alias[alias] + + range_ = TimeSeriesRange(name, from_date, to_date) + hash_set.add(range_) + + def _include_time_series_by_range_type_and_time( + self, alias: str, name: str, type_: TimeSeriesRangeType, time: TimeValue + ) -> None: + self._assert_valid(alias, name) + self._assert_valid_type(type_, time) + + if self.time_series_to_include_by_source_alias is None: self._time_series_to_include_by_source_alias = {} hash_set = self._time_series_to_include_by_source_alias.get(alias, None) - if not hash_set: + if hash_set is None: hash_set = set() self._time_series_to_include_by_source_alias[alias] = hash_set - hash_set.add(ravendb.documents.operations.time_series.TimeSeriesRange(name, from_date, to_date)) + time_range = TimeSeriesTimeRange(name, time, type_) + hash_set.add(time_range) + + def _include_time_series_by_range_type_and_count( + self, alias: str, name: str, type_: TimeSeriesRangeType, count: int + ) -> None: + self._assert_valid(alias, name) + self._assert_valid_type_and_count(type_, count) + + if self.time_series_to_include_by_source_alias is None: + self._time_series_to_include_by_source_alias = {} + + hash_set = self._time_series_to_include_by_source_alias.get(alias, None) + if hash_set is None: + hash_set = set() + self._time_series_to_include_by_source_alias[alias] = hash_set + + count_range = TimeSeriesCountRange(name, count, type_) + hash_set.add(count_range) + + def _include_array_of_time_series_by_range_type_and_count( + self, names: List[str], type_: TimeSeriesRangeType, count: int + ) -> None: + if names is None: + raise ValueError("Names cannot be None") + + for name in names: + 
+            self._include_time_series_by_range_type_and_count("", name, type_, count)
+
+    def _include_array_of_time_series_by_range_type_and_time(
+        self, names: List[str], type_: TimeSeriesRangeType, time: TimeValue
+    ) -> None:
+        if names is None:
+            raise ValueError("Names cannot be None")
+
+        for name in names:
+            self._include_time_series_by_range_type_and_time("", name, type_, time)
+
+    @staticmethod
+    def _assert_valid_type_and_count(type_: TimeSeriesRangeType, count: int) -> None:
+        if type_ == TimeSeriesRangeType.NONE:
+            raise ValueError("Time range type cannot be set to NONE when count is specified.")
+        elif type_ == TimeSeriesRangeType.LAST:
+            if count <= 0:
+                raise ValueError("Count has to be positive")
+        else:
+            raise ValueError(f"Not supported time range type {type_.value}")
+
+    @staticmethod
+    def _assert_valid_type(type_: TimeSeriesRangeType, time: TimeValue) -> None:
+        if type_ == TimeSeriesRangeType.NONE:
+            raise ValueError("Time range type cannot be set to NONE when time is specified.")
+        elif type_ == TimeSeriesRangeType.LAST:
+            if time is not None:
+                if time.value <= 0:
+                    raise ValueError("Time range type cannot be set to LAST when time is negative or zero.")
+
+                return
+
+            raise ValueError("Time range type cannot be set to LAST when time is not specified.")
+        else:
+            raise RuntimeError(f"Not supported time range type: {type_}")


 class IncludeBuilder(IncludeBuilderBase):
@@ -138,7 +263,39 @@ def include_time_series(
         to_date: Optional[datetime.datetime] = None,
         alias: Optional[str] = "",
     ) -> IncludeBuilderBase:
-        self._include_time_series(alias, name, from_date, to_date)
+        self._include_time_series_from_to(alias, name, from_date, to_date)
         return self
+
+    def include_time_series_by_range_type_and_time(
+        self, name: Optional[str], type_: TimeSeriesRangeType, time: TimeValue
+    ) -> IncludeBuilderBase:
+        self._include_time_series_by_range_type_and_time("", name, type_, time)
+        return self
+
+    def include_time_series_by_range_type_and_count(
+        self, name: str, type_: TimeSeriesRangeType, count: int
+    ) -> IncludeBuilderBase:
+        self._include_time_series_by_range_type_and_count("", name, type_, count)
+        return self
+
+    def include_array_of_time_series_by_range_type_and_time(
+        self, names: List[str], type_: TimeSeriesRangeType, time: TimeValue
+    ) -> IncludeBuilderBase:
+        self._include_array_of_time_series_by_range_type_and_time(names, type_, time)
+        return self
+
+    def include_array_of_time_series_by_range_type_and_count(
+        self, names: List[str], type_: TimeSeriesRangeType, count: int
+    ) -> IncludeBuilderBase:
+        self._include_array_of_time_series_by_range_type_and_count(names, type_, count)
+        return self
+
+    def include_all_time_series_by_time(self, type_: TimeSeriesRangeType, time: TimeValue) -> IncludeBuilderBase:
+        self._include_time_series_by_range_type_and_time("", constants.TimeSeries.ALL, type_, time)
+        return self
+
+    def include_all_time_series_by_count(self, type_: TimeSeriesRangeType, count: int) -> IncludeBuilderBase:
+        self._include_time_series_by_range_type_and_count("", constants.TimeSeries.ALL, type_, count)
+        return self

     def include_compare_exchange_value(self, path: str) -> IncludeBuilderBase:
@@ -170,9 +327,9 @@ def include_time_series(
         to_date: Optional[datetime.datetime] = None,
         alias: Optional[str] = "",
     ):
-        self._include_time_series(alias, name, from_date, to_date)
+        self._include_time_series_from_to(alias, name, from_date, to_date)
         return self
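A usage sketch of the include-builder surface added above. `store` (an open `DocumentStore`) and the `User` entity class are assumed here, as in the tests further down in this changeset; the `session.load` signature with an include lambda matches those tests.

```python
from datetime import datetime

from ravendb.documents.session.time_series import TimeSeriesRangeType
from ravendb.primitives.time_series import TimeValue

with store.open_session() as session:  # 'store': an initialized DocumentStore (assumed)
    # classic from/to include, unchanged by this patch
    user = session.load(
        "users/1",
        User,
        lambda b: b.include_time_series("Heartrate", datetime(2023, 8, 20), datetime(2023, 8, 21)),
    )

    # new in this patch: include the last 30 minutes of every time series
    user = session.load(
        "users/1",
        User,
        lambda b: b.include_all_time_series_by_time(TimeSeriesRangeType.LAST, TimeValue.of_minutes(30)),
    )

    # or the last 100 entries of selected series
    user = session.load(
        "users/1",
        User,
        lambda b: b.include_array_of_time_series_by_range_type_and_count(
            ["Heartrate", "BloodPressure"], TimeSeriesRangeType.LAST, 100
        ),
    )
```


     def include_compare_exchange_value(self, path: str):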
self._include_compare_exchange_value(path) return self @@ -226,5 +386,5 @@ def include_tags(self) -> TimeSeriesIncludeBuilder: return self def include_document(self) -> TimeSeriesIncludeBuilder: - self._include_time_series_document = TimeSeriesIncludeBuilder + self._include_time_series_document = True return self diff --git a/ravendb/documents/session/operations/lazy.py b/ravendb/documents/session/operations/lazy.py index b5678148..b0024366 100644 --- a/ravendb/documents/session/operations/lazy.py +++ b/ravendb/documents/session/operations/lazy.py @@ -9,7 +9,7 @@ CompareExchangeValueResultParser, ) -from ravendb import constants +from ravendb.primitives import constants from ravendb.documents.queries.facets.misc import FacetResult from ravendb.documents.queries.index_query import IndexQuery from ravendb.documents.session.document_info import DocumentInfo diff --git a/ravendb/documents/session/operations/load_operation.py b/ravendb/documents/session/operations/load_operation.py index 319f9318..3835d519 100644 --- a/ravendb/documents/session/operations/load_operation.py +++ b/ravendb/documents/session/operations/load_operation.py @@ -1,6 +1,6 @@ from __future__ import annotations import logging -from typing import Optional, List, TYPE_CHECKING, Type, TypeVar +from typing import Optional, List, TYPE_CHECKING, Type, TypeVar, Set from ravendb.documents.commands.crud import GetDocumentsCommand, GetDocumentsResult from ravendb.documents.session.document_info import DocumentInfo @@ -78,7 +78,7 @@ def by_key(self, key: str): self._keys = [key] return self - def with_includes(self, includes: List[str]): + def with_includes(self, includes: Optional[Set[str]]): self._includes = includes return self diff --git a/ravendb/documents/session/operations/query.py b/ravendb/documents/session/operations/query.py index 70308fcb..15f70498 100644 --- a/ravendb/documents/session/operations/query.py +++ b/ravendb/documents/session/operations/query.py @@ -3,7 +3,7 @@ import logging from typing import Union, Optional, TypeVar, List, Type, Callable, Dict, TYPE_CHECKING -from ravendb import constants +from ravendb.primitives import constants from ravendb.documents.commands.query import QueryCommand from ravendb.documents.queries.index_query import IndexQuery from ravendb.documents.queries.query import QueryResult diff --git a/ravendb/documents/session/query.py b/ravendb/documents/session/query.py index 4d30b49a..e0aadf7d 100644 --- a/ravendb/documents/session/query.py +++ b/ravendb/documents/session/query.py @@ -19,7 +19,8 @@ TYPE_CHECKING, ) -from ravendb import constants +from ravendb.documents.session.time_series import TimeSeriesRange +from ravendb.primitives import constants from ravendb.documents.conventions import DocumentConventions from ravendb.documents.indexes.spatial.configuration import SpatialUnits, SpatialRelation from ravendb.documents.queries.explanation import Explanations, ExplanationOptions @@ -83,6 +84,7 @@ ShapeToken, SuggestToken, CounterIncludesToken, + TimeSeriesIncludesToken, ) from ravendb.documents.session.utils.document_query import DocumentQueryHelper from ravendb.documents.session.utils.includes_util import IncludesUtil @@ -147,6 +149,7 @@ def __init__( self._explanation_token: Optional[ExplanationToken] = None self._explanations: Optional[Explanations] = None self._counter_includes_tokens: Optional[List[CounterIncludesToken]] = None + self._time_series_includes_tokens: Optional[List[TimeSeriesIncludesToken]] = None self._before_query_executed_callback: List[Callable[[IndexQuery], None]] 
= [] self._after_query_executed_callback: List[Callable[[QueryResult], None]] = [ @@ -365,15 +368,18 @@ def _include(self, path_or_include_builder: Union[str, IncludeBuilderBase]) -> N includes = path_or_include_builder if includes is None: return - if includes._documents_to_include is not None: - self._document_includes.update(includes._documents_to_include) - - # todo: counters and time series includes - self._include_counters(includes._alias, includes._counters_to_include_by_source_path) - # if includes.time_series_to_include_by_source_alias is not None: - # self._include_time_series(includes.alias, includes.time_series_to_include_by_source_alias) - if includes._compare_exchange_values_to_include is not None: - for compare_exchange_value in includes._compare_exchange_values_to_include: + if includes.documents_to_include is not None: + self._document_includes.update(includes.documents_to_include) + + self._include_counters(includes.alias, includes.counters_to_include_by_source_path) + + if includes.time_series_to_include_by_source_alias is not None: + self._include_time_series(includes.alias, includes.time_series_to_include_by_source_alias) + + if includes.compare_exchange_values_to_include is not None: + self._compare_exchange_includes_tokens = [] + + for compare_exchange_value in includes.compare_exchange_values_to_include: self._compare_exchange_includes_tokens.append( CompareExchangeValueIncludesToken.create(compare_exchange_value) ) @@ -921,7 +927,7 @@ def __build_include(self, query_text: List[str]) -> None: and self._query_timings is None and not self._compare_exchange_includes_tokens and self._counter_includes_tokens is None - # todo: and self._time_series_includes_tokens is None + and self._time_series_includes_tokens is None ): return @@ -940,9 +946,8 @@ def __build_include(self, query_text: List[str]) -> None: else: query_text.append(include) - # todo: counters & time series first = self.__write_include_tokens(self._counter_includes_tokens, first, query_text) - # first = self.__write_include_tokens(self._time_series_includes_tokens, first, query_text) + first = self.__write_include_tokens(self._time_series_includes_tokens, first, query_text) first = self.__write_include_tokens(self._compare_exchange_includes_tokens, first, query_text) first = self.__write_include_tokens(self._highlighting_tokens, first, query_text) @@ -1265,7 +1270,10 @@ def add_alias_to_includes_tokens(self, from_alias: str) -> str: if self._counter_includes_tokens is not None: for counter_includes_token in self._counter_includes_tokens: counter_includes_token.add_alias_to_path(from_alias) - # todo: time series tokens + + if self._time_series_includes_tokens is not None: + for time_series_include_token in self._time_series_includes_tokens: + time_series_include_token.add_alias_to_path(from_alias) return from_alias @@ -1689,9 +1697,17 @@ def _include_counters( for name in value[1]: self._counter_includes_tokens.append(CounterIncludesToken.create(key, name)) - # todo: time series - # def _include_time_series(self, alias:str, time_series_to_include:Dict[str,Set[AbstractTimeSeriesRange]]) -> None: - # pass + def _include_time_series(self, alias: str, time_series_to_include: Dict[str, Set[TimeSeriesRange]]) -> None: + if not time_series_to_include: + return + + self._time_series_includes_tokens = [] + if self._includes_alias is None: + self._includes_alias = alias + + for ts_name, ranges in time_series_to_include.items(): + for ts_range in ranges: + 
self._time_series_includes_tokens.append(TimeSeriesIncludesToken.create(ts_name, ts_range)) class DocumentQuery(Generic[_T], AbstractDocumentQuery[_T]): diff --git a/ravendb/documents/session/time_series.py b/ravendb/documents/session/time_series.py index 65b2d29a..86320d69 100644 --- a/ravendb/documents/session/time_series.py +++ b/ravendb/documents/session/time_series.py @@ -1,16 +1,52 @@ from __future__ import annotations + +import abc import datetime +import inspect import math +from enum import Enum from typing import List, Dict, Type, Tuple, Any, TypeVar, Generic, Optional -from ravendb import constants +from ravendb.primitives import constants from ravendb.exceptions.raven_exceptions import RavenException +from ravendb.primitives.time_series import TimeValue from ravendb.tools.utils import Utils -_T = TypeVar("_T") +_T_TSBindable = TypeVar("_T_TSBindable") _T_Values = TypeVar("_T_Values") +class AbstractTimeSeriesRange(abc.ABC): + def __init__(self, name: str): + self.name = name + + +class TimeSeriesRange(AbstractTimeSeriesRange): + def __init__(self, name: str, from_date: Optional[datetime.datetime], to_date: Optional[datetime.datetime]): + super().__init__(name) + self.from_date = from_date + self.to_date = to_date + + +class TimeSeriesTimeRange(AbstractTimeSeriesRange): + def __init__(self, name: str, time: TimeValue, type: TimeSeriesRangeType): + super().__init__(name) + self.time = time + self.type = type + + +class TimeSeriesCountRange(AbstractTimeSeriesRange): + def __init__(self, name: str, count: int, type: TimeSeriesRangeType): + super().__init__(name) + self.count = count + self.type = type + + +class TimeSeriesRangeType(Enum): + NONE = "None" + LAST = "Last" + + class TypedTimeSeriesRollupEntry(Generic[_T_Values]): def __init__(self, object_type: Type[_T_Values], timestamp: datetime.datetime): self._object_type = object_type @@ -103,19 +139,19 @@ def _assign_rollup(self, target: List[float], source: _T_Values, offset: int) -> target[i * 6 + offset] = values[i] @classmethod - def from_entry(cls, object_type: Type[_T], entry: TimeSeriesEntry): - result = TypedTimeSeriesRollupEntry(object_type, entry.timestamp) + def from_entry(cls, ts_bindable_object_type: Type[_T_TSBindable], entry: TimeSeriesEntry): + result = TypedTimeSeriesRollupEntry(ts_bindable_object_type, entry.timestamp) result.rollup = True result.tag = entry.tag values = entry.values - result._first = TimeSeriesValuesHelper.set_fields(object_type, cls.extract_values(values, 0)) - result._last = TimeSeriesValuesHelper.set_fields(object_type, cls.extract_values(values, 1)) - result._min = TimeSeriesValuesHelper.set_fields(object_type, cls.extract_values(values, 2)) - result._max = TimeSeriesValuesHelper.set_fields(object_type, cls.extract_values(values, 3)) - result._sum = TimeSeriesValuesHelper.set_fields(object_type, cls.extract_values(values, 4)) - result._count = TimeSeriesValuesHelper.set_fields(object_type, cls.extract_values(values, 5)) + result._first = TimeSeriesValuesHelper.set_fields(ts_bindable_object_type, cls.extract_values(values, 0)) + result._last = TimeSeriesValuesHelper.set_fields(ts_bindable_object_type, cls.extract_values(values, 1)) + result._min = TimeSeriesValuesHelper.set_fields(ts_bindable_object_type, cls.extract_values(values, 2)) + result._max = TimeSeriesValuesHelper.set_fields(ts_bindable_object_type, cls.extract_values(values, 3)) + result._sum = TimeSeriesValuesHelper.set_fields(ts_bindable_object_type, cls.extract_values(values, 4)) + result._count = 
TimeSeriesValuesHelper.set_fields(ts_bindable_object_type, cls.extract_values(values, 5))
+        return result

     @staticmethod
     def extract_values(input: List[float], offset: int) -> List[float]:
@@ -130,14 +166,14 @@ def extract_values(input: List[float], offset: int) -> List[float]:
         return result


-class TypedTimeSeriesEntry(Generic[_T]):
+class TypedTimeSeriesEntry(Generic[_T_TSBindable]):
     def __init__(
         self,
         timestamp: datetime.datetime = None,
         tag: str = None,
         values: List[int] = None,
         rollup: bool = None,
-        value: _T = None,
+        value: _T_TSBindable = None,
     ):
         self.timestamp = timestamp
         self.tag = tag
@@ -186,60 +222,116 @@ def from_json(cls, json_dict: Dict[str, Any]):
             json_dict["IsRollup"],
         )

-    def as_typed_entry(self, object_type: Type[_T]) -> TypedTimeSeriesEntry[_T]:
+    def as_typed_entry(self, ts_bindable_object_type: Type[_T_TSBindable]) -> TypedTimeSeriesEntry[_T_TSBindable]:
         return TypedTimeSeriesEntry(
             self.timestamp,
             self.tag,
             self.values,
             self.rollup,
-            TimeSeriesValuesHelper.set_fields(object_type, self.values, self.rollup),
+            TimeSeriesValuesHelper.set_fields(ts_bindable_object_type, self.values, self.rollup),
         )


+class ITimeSeriesValuesBindable(abc.ABC):
+    @abc.abstractmethod
+    def get_time_series_mapping(self) -> Dict[int, Tuple[str, Optional[str]]]:
+        # return a dictionary {index of time series value - (name of 'float' field, label)}
+        # e.g. return {0 : ('heart', 'Heartrate'), 1: ('bp', 'Blood Pressure')}
+        # for some class that has 'heart' and 'bp' float fields
+        raise NotImplementedError()
+
+
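A minimal sketch of a class implementing `ITimeSeriesValuesBindable`, expanding the docstring example above (the `heart`/`bp` fields and the series value names come from that docstring):

```python
from typing import Dict, Optional, Tuple

from ravendb.documents.session.time_series import ITimeSeriesValuesBindable


class HeartRateAndBloodPressure(ITimeSeriesValuesBindable):
    def __init__(self, heart: float = None, bp: float = None):
        # create_ts_bindable_class_instance() below calls __init__ with None
        # for every argument, so the constructor must tolerate empty values
        self.heart = heart
        self.bp = bp

    def get_time_series_mapping(self) -> Dict[int, Tuple[str, Optional[str]]]:
        # entry value 0 binds to self.heart (named 'Heartrate'),
        # entry value 1 binds to self.bp (named 'Blood Pressure')
        return {0: ("heart", "Heartrate"), 1: ("bp", "Blood Pressure")}
```

With this in place, `TimeSeriesValuesHelper.get_values(HeartRateAndBloodPressure, obj)` returns `[obj.heart, obj.bp]`, and `set_fields` reconstructs an instance from a raw value list.
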
" + f"Is the {ts_bindable_object_type.__name__}__init__() raising errors in some cases?", + e, + ) + return instance @staticmethod - def get_fields_mapping(object_type: Type) -> Dict[int, Tuple[str, str]]: - raise NotImplementedError() # return map of the float fields associated with time series value name + def get_fields_mapping(ts_bindable_object_type: Type[_T_TSBindable]) -> Optional[Dict[int, Tuple[str, str]]]: + if ts_bindable_object_type not in TimeSeriesValuesHelper._cache: + if not issubclass(ts_bindable_object_type, ITimeSeriesValuesBindable): + return None + + ts_bindable_class_instance = TimeSeriesValuesHelper.create_ts_bindable_class_instance( + ts_bindable_object_type + ) + ts_bindable_type_mapping = ts_bindable_class_instance.get_time_series_mapping() + + new_cache_entry = {} + for idx, field_name_and_ts_value_name in ts_bindable_type_mapping.items(): + field_name = field_name_and_ts_value_name[0] + time_series_value_name = field_name_and_ts_value_name[1] or field_name + new_cache_entry[idx] = (field_name, time_series_value_name) + + TimeSeriesValuesHelper._cache[ts_bindable_object_type] = new_cache_entry + return TimeSeriesValuesHelper._cache[ts_bindable_object_type] @staticmethod - def get_values(object_type: Type[_T], obj: _T) -> Optional[List[float]]: - mapping = TimeSeriesValuesHelper.get_fields_mapping(object_type) + def get_values(ts_bindable_object_type: Type[_T_TSBindable], obj: _T_TSBindable) -> Optional[List[float]]: + mapping = TimeSeriesValuesHelper.get_fields_mapping(ts_bindable_object_type) if mapping is None: return None try: values: List[Optional[float]] = [None] * len(mapping) for key, value in mapping.items(): - values[key] = obj.__dict__()[value[0]] # get field value + values[key] = obj.__dict__[value[0]] # get field value return values except Exception as e: raise RavenException("Unable to read time series values", e) @staticmethod - def set_fields(object_type: Type[_T], values: List[float], as_rollup: bool): + def set_fields( + ts_bindable_object_type: Type[_T_TSBindable], values: List[float], as_rollup: bool = False + ) -> _T_TSBindable: if values is None: return None - mapping = TimeSeriesValuesHelper.get_fields_mapping(object_type) + mapping = TimeSeriesValuesHelper.get_fields_mapping(ts_bindable_object_type) if mapping is None: return None - raise NotImplementedError() + try: + ts_bindable_class_instance = TimeSeriesValuesHelper.create_ts_bindable_class_instance( + ts_bindable_object_type + ) + for index, field_name_and_ts_name_tuple in mapping.items(): + value = None # Not-a-Number - @staticmethod - def set_fields(object_type: Type[_T], values: List[float], as_rollup: bool = False) -> _T: - if values is None: - return None + if index < len(values): + if as_rollup: + index *= 6 + value = values[index] - mapping = TimeSeriesValuesHelper.get_fields_mapping(object_type) - if mapping is None: - return None + field_name = field_name_and_ts_name_tuple[0] + ts_bindable_class_instance.__dict__[field_name] = value - try: - raise NotImplementedError() + return ts_bindable_class_instance except Exception as e: raise RavenException("Unable to read time series values.", e) diff --git a/ravendb/documents/session/tokens/query_tokens/definitions.py b/ravendb/documents/session/tokens/query_tokens/definitions.py index 7f929e04..a3c945df 100644 --- a/ravendb/documents/session/tokens/query_tokens/definitions.py +++ b/ravendb/documents/session/tokens/query_tokens/definitions.py @@ -2,9 +2,16 @@ import enum import os -from typing import List, Union, Optional, Tuple, 
-
-from ravendb import constants
+from typing import List, Union, Optional, Tuple
+
+from ravendb.documents.session.time_series import (
+    TimeSeriesRange,
+    TimeSeriesTimeRange,
+    TimeSeriesRangeType,
+    AbstractTimeSeriesRange,
+    TimeSeriesCountRange,
+)
+from ravendb.primitives import constants
 from ravendb.documents.session.misc import OrderingType
 from ravendb.documents.indexes.spatial.configuration import SpatialUnits
 from ravendb.documents.queries.group_by import GroupByMethod
@@ -15,9 +22,6 @@
 from ravendb.documents.session.utils.document_query import DocumentQueryHelper
 from ravendb.tools.utils import Utils

-if TYPE_CHECKING:
-    from ravendb.documents.operations.time_series import TimeSeriesRange
-

 class CompareExchangeValueIncludesToken(QueryToken):
     def __init__(self, path: str):
@@ -65,12 +69,12 @@ def write_to(self, writer: List[str]):


 class TimeSeriesIncludesToken(QueryToken):
-    def __init__(self, source_path: str, time_range: "TimeSeriesRange"):
+    def __init__(self, source_path: str, time_range: AbstractTimeSeriesRange):
         self.__range = time_range
         self.__source_path = source_path

     @classmethod
-    def create(cls, source_path: str, time_range: "TimeSeriesRange") -> TimeSeriesIncludesToken:
+    def create(cls, source_path: str, time_range: AbstractTimeSeriesRange) -> TimeSeriesIncludesToken:
         return cls(source_path, time_range)

     def add_alias_to_path(self, alias: str):
@@ -81,9 +85,43 @@ def write_to(self, writer: List[str]):
         if self.__source_path:
             writer.append(f"{self.__source_path}, ")

-        writer.append(f"'{self.__range.name}', ")
-        writer.append(f"'{Utils.datetime_to_string(self.__range.from_date)}', " if self.__range.from_date else "null,")
-        writer.append(f"'{Utils.datetime_to_string(self.__range.to_date)}', " if self.__range.to_date else "null")
+        if self.__range.name:
+            writer.append(f"'{self.__range.name}', ")
+
+        if isinstance(self.__range, TimeSeriesRange):
+            self.__range: TimeSeriesRange
+
+            writer.append(
+                f"'{Utils.datetime_to_string(self.__range.from_date)}', " if self.__range.from_date else "null,"
+            )
+            writer.append(f"'{Utils.datetime_to_string(self.__range.to_date)}', " if self.__range.to_date else "null")
+
+        elif isinstance(self.__range, TimeSeriesTimeRange):
+            self.__range: TimeSeriesTimeRange
+
+            if self.__range.type == TimeSeriesRangeType.LAST:
+                writer.append("last(")
+            else:
+                raise ValueError(f"Not supported time range type: {str(self.__range.type)}")
+            writer.append(str(self.__range.time.value))
+            writer.append(", '")
+            writer.append(self.__range.time.unit.value)
+            writer.append("')")
+
+        elif isinstance(self.__range, TimeSeriesCountRange):
+            self.__range: TimeSeriesCountRange
+
+            if self.__range.type == TimeSeriesRangeType.LAST:
+                writer.append("last(")
+            else:
+                raise ValueError(f"Not supported time range type: {str(self.__range.type)}")
+
+            writer.append(str(self.__range.count))
+            writer.append(")")
+
+        else:
+            raise ValueError(f"Not supported time range type: {str(self.__range.__class__.__name__)}")
+
         writer.append(")")
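A sketch of what the rewritten `write_to` emits for the new range flavours. Any `timeseries(` prefix written by context lines outside this hunk is not shown, so the printed string is only the token's own contribution:

```python
from ravendb.documents.session.time_series import TimeSeriesRangeType, TimeSeriesTimeRange
from ravendb.documents.session.tokens.query_tokens.definitions import TimeSeriesIncludesToken
from ravendb.primitives.time_series import TimeValue

writer = []
token = TimeSeriesIncludesToken.create(
    "", TimeSeriesTimeRange("HeartRate", TimeValue.of_minutes(30), TimeSeriesRangeType.LAST)
)
token.write_to(writer)
# 30 minutes become TimeValue(1800, SECOND), so the token appends:
#   'HeartRate', last(1800, 'Second'))
print("".join(writer))
```

diff --git a/ravendb/documents/subscriptions/worker.py b/ravendb/documents/subscriptions/worker.py
index 11be7098..4aef5c94 100644
--- a/ravendb/documents/subscriptions/worker.py
+++ b/ravendb/documents/subscriptions/worker.py
@@ -12,7 +12,7 @@
 from socket import socket
 from typing import TypeVar, Generic, Type, Optional, Callable, Dict, List, TYPE_CHECKING, Any

-from ravendb import constants
+from ravendb.primitives import constants
 from ravendb.documents.session.entity_to_json import EntityToJson
 from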
ravendb.documents.commands.subscriptions import GetTcpInfoForRemoteTaskCommand, TcpConnectionInfo from ravendb.documents.session.document_session_operations.in_memory_document_session_operations import ( diff --git a/ravendb/documents/time_series.py b/ravendb/documents/time_series.py index 7579daae..260fe6b9 100644 --- a/ravendb/documents/time_series.py +++ b/ravendb/documents/time_series.py @@ -3,7 +3,7 @@ from typing import TYPE_CHECKING, Optional, Type, TypeVar, List from ravendb.documents.conventions import DocumentConventions -from ravendb.documents.session.time_series import TimeSeriesValuesHelper +from ravendb.documents.session.time_series import TimeSeriesValuesHelper, ITimeSeriesValuesBindable from ravendb.documents.operations.time_series import ( ConfigureTimeSeriesValueNamesOperation, TimeSeriesPolicy, @@ -14,24 +14,12 @@ ) _T_Collection = TypeVar("_T_Collection") -_T_Time_Series_Entry = TypeVar("_T_Time_Series_Entry") +_T_TS_Values_Bindable = TypeVar("_T_TS_Values_Bindable", bound=ITimeSeriesValuesBindable) if TYPE_CHECKING: from ravendb import DocumentStore -def time_series_value(idx: int, name: Optional[str] = ""): - if -127 > idx or idx > 127: - raise ValueError("idx must be between 0 and 127") - - def decorator(field): - field.idx = idx - field.name = name - return field - - return decorator - - class TimeSeriesOperations: def __init__(self, store: "DocumentStore", database: Optional[str] = None): self._store = store @@ -41,17 +29,17 @@ def __init__(self, store: "DocumentStore", database: Optional[str] = None): def register_type( self, collection_class: Type[_T_Collection], - time_series_entry_class: Type[_T_Time_Series_Entry], + ts_bindable_object_type: Type[_T_TS_Values_Bindable], name: Optional[str] = None, ): if name is None: - name = self.get_time_series_name(time_series_entry_class, self._store.conventions) + name = self.get_time_series_name(ts_bindable_object_type, self._store.conventions) - mapping = TimeSeriesValuesHelper.get_fields_mapping(time_series_entry_class) + mapping = TimeSeriesValuesHelper.get_fields_mapping(ts_bindable_object_type) if mapping is None: raise RuntimeError( - f"{self.get_time_series_name(time_series_entry_class, self._store.conventions)} " - f"must contain @{time_series_value.__name__} decorated fields" + f"{self.get_time_series_name(ts_bindable_object_type, self._store.conventions)} " + f"must implement {ITimeSeriesValuesBindable.__name__}" ) collection = self._store.conventions.find_collection_name(collection_class) @@ -95,8 +83,8 @@ def remove_policy_collection_name(self, collection: str, name: str) -> None: self._executor.send(RemoveTimeSeriesPolicyOperation(collection, name)) @staticmethod - def get_time_series_name(object_type: Type[_T_Time_Series_Entry], conventions: DocumentConventions): - return conventions.find_collection_name(object_type) + def get_time_series_name(ts_bindable_object_type: Type[_T_TS_Values_Bindable], conventions: DocumentConventions): + return conventions.find_collection_name(ts_bindable_object_type) def for_database(self, database: str) -> TimeSeriesOperations: if self._database.lower() == database.lower(): diff --git a/ravendb/extensions/http_extensions.py b/ravendb/extensions/http_extensions.py index 90caec2b..6ecdb170 100644 --- a/ravendb/extensions/http_extensions.py +++ b/ravendb/extensions/http_extensions.py @@ -2,7 +2,7 @@ import requests -from ravendb import constants +from ravendb.primitives import constants class HttpExtensions: diff --git a/ravendb/extensions/json_extensions.py 
b/ravendb/extensions/json_extensions.py
index afd4e9d9..2c99ce37 100644
--- a/ravendb/extensions/json_extensions.py
+++ b/ravendb/extensions/json_extensions.py
@@ -1,13 +1,10 @@
-import datetime
-import enum
 import json
 from typing import Dict

-from ravendb import constants
+from ravendb.primitives import constants
 from ravendb.documents.conventions import DocumentConventions
 from ravendb.documents.queries.index_query import IndexQuery
 from ravendb.documents.queries.query import ProjectionBehavior
-from ravendb.tools.utils import Utils


 class JsonExtensions:
diff --git a/ravendb/http/request_executor.py b/ravendb/http/request_executor.py
index cb48df4e..ddce33a6 100644
--- a/ravendb/http/request_executor.py
+++ b/ravendb/http/request_executor.py
@@ -12,7 +12,7 @@
 import requests
 from copy import copy

-from ravendb import constants
+from ravendb.primitives import constants
 from ravendb.documents.session.event_args import BeforeRequestEventArgs, FailedRequestEventArgs, SucceedRequestEventArgs
 from ravendb.exceptions.exceptions import (
     AllTopologyNodesDownException,
diff --git a/ravendb/json/json_operation.py b/ravendb/json/json_operation.py
index 6283744d..7d33e5b4 100644
--- a/ravendb/json/json_operation.py
+++ b/ravendb/json/json_operation.py
@@ -1,11 +1,8 @@
 from typing import Dict, List

-import ravendb.tools.utils
-
-import ravendb.constants as constants
+import ravendb.primitives.constants as constants
 from ravendb.documents.session.document_info import DocumentInfo
 from ravendb.documents.session.misc import DocumentsChanges
-from ravendb.tools.utils import Utils


 class JsonOperation:
diff --git a/ravendb/json/metadata_as_dictionary.py b/ravendb/json/metadata_as_dictionary.py
index b6aacef8..4b507bc6 100644
--- a/ravendb/json/metadata_as_dictionary.py
+++ b/ravendb/json/metadata_as_dictionary.py
@@ -2,7 +2,7 @@

 from typing import Optional

-from ravendb import constants
+from ravendb.primitives import constants


 class MetadataAsDictionary:
diff --git a/ravendb/misc.py b/ravendb/misc.py
index 12a1b15e..e69de29b 100644
--- a/ravendb/misc.py
+++ b/ravendb/misc.py
@@ -1,16 +0,0 @@
-from typing import TypeVar, Generic
-
-_T = TypeVar("_T")
-
-
-class Reference(Generic[_T]):
-    def __init__(self, value: _T = None):
-        self.__value = [value]
-
-    @property
-    def value(self) -> _T:
-        return self.__value[0]
-
-    @value.setter
-    def value(self, value: _T):
-        self.__value[0] = value
diff --git a/ravendb/constants.py b/ravendb/primitives/constants.py
similarity index 92%
rename from ravendb/constants.py
rename to ravendb/primitives/constants.py
index d481450f..ea8fca1a 100644
--- a/ravendb/constants.py
+++ b/ravendb/primitives/constants.py
@@ -1,6 +1,7 @@
 import sys

-int_max = 0x7FFFFFF
+int_min = -0x80000000
+int_max = 0x7FFFFFFF
 min_normal = sys.float_info.min
 json_serialize_method_name = "to_json"
 nan_value = float("nan")
@@ -68,3 +69,10 @@ class Headers:
     TRANSFER_ENCODING = "Transfer-Encoding"
     CONTENT_ENCODING = "Content-Encoding"
     CONTENT_LENGTH = "Content-Length"
+
+
+class TimeSeries:
+    SELECT_FIELD_NAME = "timeseries"
+    QUERY_FUNCTION = "__timeSeriesQueryFunction"
+
+    ALL = "@all_timeseries"
diff --git a/ravendb/primitives/time_series.py b/ravendb/primitives/time_series.py
new file mode 100644
index 00000000..bf864b84
--- /dev/null
+++ b/ravendb/primitives/time_series.py
@@ -0,0 +1,226 @@
+from __future__ import annotations
+from enum import Enum
+from typing import List, Tuple
+
+from ravendb.primitives.constants import int_max, int_min
+
+
+class TimeValueUnit(Enum):
+    NONE = "None"
+    SECOND = "Second"
+    MONTH = "Month"
+
+
+class TimeValue:
+    SECONDS_PER_DAY = 86400
+    SECONDS_IN_28_DAYS = 28 * SECONDS_PER_DAY  # lower-bound of seconds in month
+    SECONDS_IN_31_DAYS = 31 * SECONDS_PER_DAY  # upper-bound of seconds in month
+    SECONDS_IN_365_DAYS = 365 * SECONDS_PER_DAY  # lower-bound of seconds in year
+    SECONDS_IN_366_DAYS = 366 * SECONDS_PER_DAY  # upper-bound of seconds in year
+
+    def __init__(self, value: int, unit: TimeValueUnit):
+        self.value = value
+        self.unit = unit
+
+    def __str__(self):
+        if self.value == int_max:
+            return "MaxValue"
+        if self.value == int_min:
+            return "MinValue"
+        if self.value == 0:
+            return "Zero"
+
+        if self.unit == TimeValueUnit.NONE:
+            return "Unknown time unit"
+
+        builder = []
+
+        if self.unit == TimeValueUnit.SECOND:
+            remaining_seconds = self.value
+
+            if remaining_seconds > self.SECONDS_PER_DAY:
+                days = self.value // self.SECONDS_PER_DAY
+                self._append(builder, days, "day")
+                remaining_seconds -= days * self.SECONDS_PER_DAY
+
+            if remaining_seconds > 3600:
+                hours = remaining_seconds // 3600
+                self._append(builder, hours, "hour")
+                remaining_seconds -= hours * 3600
+
+            if remaining_seconds > 60:
+                minutes = remaining_seconds // 60
+                self._append(builder, minutes, "minute")
+                remaining_seconds -= minutes * 60
+
+            if remaining_seconds > 0:
+                self._append(builder, remaining_seconds, "second")
+
+        elif self.unit == TimeValueUnit.MONTH:
+            if self.value >= 12:
+                self._append(builder, self.value // 12, "year")
+            if self.value % 12 > 0:
+                self._append(builder, self.value % 12, "month")
+
+        else:
+            raise ValueError(f"Not supported unit: {self.unit}")
+
+        return "".join(builder).strip()
+
+    @classmethod
+    def ZERO(cls):
+        return cls(0, TimeValueUnit.NONE)
+
+    @classmethod
+    def MAX_VALUE(cls):
+        return cls(int_max, TimeValueUnit.NONE)
+
+    @classmethod
+    def MIN_VALUE(cls):
+        return cls(int_min, TimeValueUnit.NONE)
+
+    @classmethod
+    def of_seconds(cls, seconds: int) -> TimeValue:
+        return cls(seconds, TimeValueUnit.SECOND)
+
+    @classmethod
+    def of_minutes(cls, minutes: int) -> TimeValue:
+        return cls(minutes * 60, TimeValueUnit.SECOND)
+
+    @classmethod
+    def of_hours(cls, hours: int) -> TimeValue:
+        return cls(hours * 3600, TimeValueUnit.SECOND)
+
+    @classmethod
+    def of_days(cls, days: int) -> TimeValue:
+        return cls(days * cls.SECONDS_PER_DAY, TimeValueUnit.SECOND)
+
+    @classmethod
+    def of_months(cls, months: int) -> TimeValue:
+        return cls(months, TimeValueUnit.MONTH)
+
+    @classmethod
+    def of_years(cls, years: int) -> TimeValue:
+        return cls(12 * years, TimeValueUnit.MONTH)
+
+    def _append(self, builder: List[str], value: int, singular: str) -> None:
+        if value <= 0:
+            return
+
+        builder.append(str(value))
+        builder.append(" ")
+        builder.append(singular)
+
+        if value == 1:
+            builder.append(" ")
+            return
+
+        builder.append("s ")  # lucky me, no special rules here
+
+    def _assert_seconds(self) -> None:
+        if self.unit != TimeValueUnit.SECOND:
+            raise ValueError("The value must be seconds")
+
+    @staticmethod
+    def _assert_valid_unit(unit: TimeValueUnit) -> None:
+        if unit == TimeValueUnit.MONTH or unit == TimeValueUnit.SECOND:
+            return
+
+        raise ValueError(f"Invalid time unit: {unit}")
+
+    @staticmethod
+    def _assert_same_units(a: TimeValue, b: TimeValue) -> None:
+        if a.unit != b.unit:
+            raise ValueError(f"Unit isn't the same {a.unit} != {b.unit}")
+
+    @classmethod
+    def _get_bounds_in_seconds(cls, time: TimeValue) -> Tuple[int, int]:
+        if time.unit == TimeValueUnit.SECOND:
+            return time.value, time.value
+        elif time.unit == TimeValueUnit.MONTH:
+            years = time.value // 12
+            upper_bound = years * cls.SECONDS_IN_366_DAYS
+            lower_bound = years * cls.SECONDS_IN_365_DAYS
+
+            remaining_months = time.value % 12
+            upper_bound += remaining_months * cls.SECONDS_IN_31_DAYS
+            lower_bound += remaining_months * cls.SECONDS_IN_28_DAYS
+
+            return lower_bound, upper_bound
+        else:
+            raise ValueError(f"Not supported time value unit: {time.unit}")
+
+    def compare_to(self, other: TimeValue) -> int:
+        if self.value == 0 or other.value == 0:
+            return self.value - other.value
+
+        condition, result = self.is_special_compare(self, other)
+        if condition:
+            return result
+
+        if self.unit == other.unit:
+            return self._trim_compare_result(self.value - other.value)
+
+        my_bounds = self._get_bounds_in_seconds(self)
+        other_bounds = self._get_bounds_in_seconds(other)
+
+        if other_bounds[1] < my_bounds[0]:
+            return 1
+
+        if other_bounds[0] > my_bounds[1]:
+            return -1
+
+        raise ValueError(f"Unable to compare {self} with {other}, since a month might have different number of days.")
+
+    @staticmethod
+    def _is_max(time: TimeValue) -> bool:
+        return time.unit == TimeValueUnit.NONE and time.value == int_max
+
+    @staticmethod
+    def _is_min(time: TimeValue) -> bool:
+        return time.unit == TimeValueUnit.NONE and time.value == int_min
+
+    @staticmethod
+    def _trim_compare_result(result: int) -> int:
+        if result > int_max:
+            return int_max
+
+        if result < int_min:
+            return int_min
+
+        return result
+
+    def __eq__(self, o: object):
+        if self is o:
+            return True
+        if o is None or self.__class__ != o.__class__:
+            return False
+        other: TimeValue = o
+        return self.compare_to(other) == 0
+
+    def __hash__(self):
+        if self.value == 0 or self.value == int_min or self.value == int_max:
+            return hash(self.value)
+        return hash((self.value, self.unit))
+
+    @classmethod
+    def is_special_compare(cls, current: TimeValue, other: TimeValue) -> Tuple[bool, int]:
+        result = 0
+
+        if cls._is_max(current):
+            result = 0 if cls._is_max(other) else 1
+            return True, result
+
+        if cls._is_max(other):
+            result = 0 if cls._is_max(current) else -1
+            return True, result
+
+        if cls._is_min(current):
+            result = 0 if cls._is_min(other) else -1
+            return True, result
+
+        if cls._is_min(other):
+            result = 0 if cls._is_min(current) else 1
+            return True, result
+
+        return False, result
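A short sketch of how `TimeValue` behaves; the values follow directly from the definitions above. Cross-unit comparisons go through the second-bounds of `_get_bounds_in_seconds`, where a month counts as anywhere between 28 and 31 days:

```python
from ravendb.primitives.time_series import TimeValue

print(TimeValue.of_seconds(3725))  # "1 hour 2 minutes 5 seconds"

# 32 days exceed even a 31-day month; 27 days undercut even a 28-day month:
assert TimeValue.of_days(32).compare_to(TimeValue.of_months(1)) > 0
assert TimeValue.of_days(27).compare_to(TimeValue.of_months(1)) < 0

try:
    TimeValue.of_days(30).compare_to(TimeValue.of_months(1))
except ValueError:
    pass  # ambiguous: 30 days may be shorter or longer than one month
```

diff --git a/ravendb/serverwide/operations/common.py b/ravendb/serverwide/operations/common.py
index 8aab0239..82a989e2 100644
--- a/ravendb/serverwide/operations/common.py
+++ b/ravendb/serverwide/operations/common.py
@@ -7,7 +7,7 @@
 from typing import Generic, TypeVar, TYPE_CHECKING, Optional, List, Dict
 import requests

-from ravendb import constants
+from ravendb.primitives import constants
 from ravendb.documents.operations.operation import Operation
 from ravendb.serverwide.database_record import DatabaseRecordWithEtag, DatabaseRecord
 from ravendb.serverwide.misc import DatabaseTopology
diff --git a/ravendb/tests/jvm_migrated_tests/attachments_tests/test_attachments_session.py b/ravendb/tests/jvm_migrated_tests/attachments_tests/test_attachments_session.py
index 69624862..f9b72534 100644
--- a/ravendb/tests/jvm_migrated_tests/attachments_tests/test_attachments_session.py
+++ b/ravendb/tests/jvm_migrated_tests/attachments_tests/test_attachments_session.py
@@ -1,4 +1,4 @@
-from ravendb import constants
+from ravendb.primitives import constants
 from ravendb.exceptions.exceptions import InvalidOperationException
 from ravendb.documents.commands.batches import DeleteCommandData
 from ravendb.documents.operations.attachments import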
DeleteAttachmentOperation diff --git a/ravendb/tests/jvm_migrated_tests/client_tests/counters_tests/test_counters_single_node.py b/ravendb/tests/jvm_migrated_tests/client_tests/counters_tests/test_counters_single_node.py index c094142e..49cef188 100644 --- a/ravendb/tests/jvm_migrated_tests/client_tests/counters_tests/test_counters_single_node.py +++ b/ravendb/tests/jvm_migrated_tests/client_tests/counters_tests/test_counters_single_node.py @@ -1,4 +1,4 @@ -from ravendb import constants +from ravendb.primitives import constants from ravendb.documents.operations.counters import ( DocumentCountersOperation, CounterOperation, diff --git a/ravendb/tests/jvm_migrated_tests/client_tests/indexing_tests/test_java_script_index.py b/ravendb/tests/jvm_migrated_tests/client_tests/indexing_tests/test_java_script_index.py index c9f7af69..76f210c3 100644 --- a/ravendb/tests/jvm_migrated_tests/client_tests/indexing_tests/test_java_script_index.py +++ b/ravendb/tests/jvm_migrated_tests/client_tests/indexing_tests/test_java_script_index.py @@ -1,6 +1,7 @@ from typing import List -from ravendb import IndexFieldOptions, constants +from ravendb import IndexFieldOptions +from ravendb.primitives import constants from ravendb.documents.indexes.definitions import FieldIndexing from ravendb.documents.indexes.index_creation import AbstractJavaScriptIndexCreationTask from ravendb.infrastructure.entities import User diff --git a/ravendb/tests/jvm_migrated_tests/client_tests/test_bulk_inserts.py b/ravendb/tests/jvm_migrated_tests/client_tests/test_bulk_inserts.py index 3ed803c6..a725dca1 100644 --- a/ravendb/tests/jvm_migrated_tests/client_tests/test_bulk_inserts.py +++ b/ravendb/tests/jvm_migrated_tests/client_tests/test_bulk_inserts.py @@ -1,7 +1,8 @@ import datetime import time -from ravendb import MetadataAsDictionary, constants +from ravendb import MetadataAsDictionary +from ravendb.primitives import constants from ravendb.exceptions.documents.bulkinsert import BulkInsertAbortedException from ravendb.tests.test_base import TestBase diff --git a/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_includes.py b/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_includes.py new file mode 100644 index 00000000..a35c2065 --- /dev/null +++ b/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_includes.py @@ -0,0 +1,874 @@ +from datetime import datetime, timedelta + +from ravendb.documents.session.time_series import TimeSeriesRangeType +from ravendb.infrastructure.orders import Company, Order +from ravendb.primitives.time_series import TimeValue +from ravendb.tests.test_base import TestBase, User + +document_id = "users/gracjan" +company_id = "companies/1-A" +order_id = "orders/1-A" +base_line = datetime(2023, 8, 20, 21, 30) +ts_name1 = "Heartrate" +ts_name2 = "Speedrate" +tag1 = "watches/fitbit" +tag2 = "watches/apple" +tag3 = "watches/bitfit" + + +class TestTimeSeriesIncludes(TestBase): + def setUp(self): + super(TestTimeSeriesIncludes, self).setUp() + + def test_should_cache_empty_time_series_ranges(self): + with self.store.open_session() as session: + user = User(name="Gracjan") + session.store(user, document_id) + session.save_changes() + + with self.store.open_session() as session: + tsf = session.time_series_for(document_id, "Heartrate") + for i in range(360): + tsf.append_single(base_line + timedelta(seconds=i * 10), 6, "watches/fitbit") + + session.save_changes() + + with self.store.open_session() as session: + user = 
session.load( + document_id, + User, + lambda i: i.include_time_series( + "Heartrate", base_line - timedelta(minutes=30), base_line - timedelta(minutes=10) + ), + ) + + self.assertEqual(1, session.advanced.number_of_requests) + + self.assertEqual("Gracjan", user.name) + + # should not go to server + + vals = session.time_series_for(document_id, "Heartrate").get( + base_line - timedelta(minutes=30), base_line - timedelta(minutes=10) + ) + self.assertEqual(1, session.advanced.number_of_requests) + + self.assertEqual(0, len(vals)) + + cache = session.time_series_by_doc_id.get(document_id) + ranges = cache.get("Heartrate") + self.assertEqual(1, len(ranges)) + + self.assertEqual(0, len(ranges[0].entries)) + + self.assertEqual(base_line - timedelta(minutes=30), ranges[0].from_date) + self.assertEqual(base_line - timedelta(minutes=10), ranges[0].to_date) + + # should not go to server + + vals = session.time_series_for(document_id, "Heartrate").get( + base_line - timedelta(minutes=25), base_line - timedelta(minutes=15) + ) + + self.assertEqual(1, session.advanced.number_of_requests) + + self.assertEqual(0, len(vals)) + + session.advanced.evict(user) + + user = session.load( + document_id, + User, + lambda i: i.include_time_series( + "BloodPressure", base_line + timedelta(minutes=10), base_line + timedelta(minutes=30) + ), + ) + + self.assertEqual(2, session.advanced.number_of_requests) + + # should not go to server + + vals = session.time_series_for(document_id, "BloodPressure").get( + base_line + timedelta(minutes=10), base_line + timedelta(minutes=30) + ) + + self.assertEqual(2, session.advanced.number_of_requests) + + self.assertEqual(0, len(vals)) + + cache = session.time_series_by_doc_id.get(document_id) + ranges = cache.get("BloodPressure") + self.assertEqual(1, len(ranges)) + self.assertEqual(0, len(ranges[0].entries)) + + self.assertEqual(base_line + timedelta(minutes=10), ranges[0].from_date) + self.assertEqual(base_line + timedelta(minutes=30), ranges[0].to_date) + + def test_session_load_with_include_time_series(self): + with self.store.open_session() as session: + company = Company(name="HR") + session.store(company, "companies/1-A") + + order = Order(company="companies/1-A") + session.store(order, "orders/1-A") + + tsf = session.time_series_for("orders/1-A", "Heartrate") + tsf.append_single(base_line, 67, "watches/apple") + tsf.append_single(base_line + timedelta(minutes=5), 64, "watches/apple") + tsf.append_single(base_line + timedelta(minutes=10), 65, "watches/fitbit") + + session.save_changes() + + with self.store.open_session() as session: + order = session.load( + "orders/1-A", Order, lambda i: i.include_documents("company").include_time_series("Heartrate") + ) + + company = session.load(order.company, Company) + self.assertEqual("HR", company.name) + + # should not go to server + values = session.time_series_for_entity(order, "Heartrate").get() + + self.assertEqual(3, len(values)) + + self.assertEqual(1, len(values[0].values)) + self.assertEqual(67, values[0].values[0]) + self.assertEqual("watches/apple", values[0].tag) + self.assertEqual(base_line, values[0].timestamp) + + self.assertEqual(1, len(values[1].values)) + self.assertEqual(64, values[1].values[0]) + self.assertEqual("watches/apple", values[1].tag) + self.assertEqual(base_line + timedelta(minutes=5), values[1].timestamp) + + self.assertEqual(1, len(values[2].values)) + self.assertEqual(65, values[2].values[0]) + self.assertEqual("watches/fitbit", values[2].tag) + self.assertEqual(base_line + timedelta(minutes=10), 
values[2].timestamp) + + self.assertEqual(1, session.advanced.number_of_requests) + + def test_include_time_series_and_update_existing_range_in_cache(self): + with self.store.open_session() as session: + user = User(name="Gracjan") + session.store(user, document_id) + session.save_changes() + + with self.store.open_session() as session: + tsf = session.time_series_for(document_id, "Heartrate") + for i in range(360): + tsf.append_single(base_line + timedelta(seconds=10 * i), 6, "watches/fitbit") + + session.save_changes() + + with self.store.open_session() as session: + vals = session.time_series_for(document_id, "Heartrate").get( + base_line + timedelta(minutes=2), base_line + timedelta(minutes=10) + ) + + self.assertEqual(1, session.advanced.number_of_requests) + + self.assertEqual(49, len(vals)) + self.assertEqual(base_line + timedelta(minutes=2), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=10), vals[48].timestamp) + + session.time_series_for(document_id, "Heartrate").append_single( + base_line + timedelta(minutes=3, seconds=3), 6, "watches/fitbit" + ) + session.save_changes() + + self.assertEqual(2, session.advanced.number_of_requests) + + user = session.load( + document_id, + User, + lambda i: i.include_time_series( + "Heartrate", base_line + timedelta(minutes=3), base_line + timedelta(minutes=5) + ), + ) + + self.assertEqual(3, session.advanced.number_of_requests) + + # should not go to server + + vals = session.time_series_for(document_id, "Heartrate").get( + base_line + timedelta(minutes=3), base_line + timedelta(minutes=5) + ) + + self.assertEqual(14, len(vals)) + + self.assertEqual(base_line + timedelta(minutes=3), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=3, seconds=3), vals[1].timestamp) + self.assertEqual(base_line + timedelta(minutes=5), vals[13].timestamp) + + def test_include_multiple_time_series(self): + with self.store.open_session() as session: + user = User(name="Gracjan") + session.store(user, document_id) + session.save_changes() + + with self.store.open_session() as session: + for i in range(360): + session.time_series_for(document_id, "Heartrate").append_single( + base_line + timedelta(seconds=i * 10), 6, "watches/fitbit" + ) + session.time_series_for(document_id, "BloodPressure").append_single( + base_line + timedelta(seconds=i * 10), 66, "watches/fitbit" + ) + session.time_series_for(document_id, "Nasdaq").append_single( + base_line + timedelta(seconds=i * 10), 8097.23, "nasdaq.com" + ) + + session.save_changes() + + with self.store.open_session() as session: + user = session.load( + document_id, + User, + lambda i: i.include_time_series( + "Heartrate", base_line + timedelta(minutes=3), base_line + timedelta(minutes=5) + ) + .include_time_series( + "BloodPressure", base_line + timedelta(minutes=40), base_line + timedelta(minutes=45) + ) + .include_time_series("Nasdaq", base_line + timedelta(minutes=15), base_line + timedelta(minutes=25)), + ) + + self.assertEqual(1, session.advanced.number_of_requests) + + self.assertEqual("Gracjan", user.name) + + # should not go to server + + vals = session.time_series_for(document_id, "Heartrate").get( + base_line + timedelta(minutes=3), base_line + timedelta(minutes=5) + ) + self.assertEqual(1, session.advanced.number_of_requests) + + self.assertEqual(13, len(vals)) + + self.assertEqual(base_line + timedelta(minutes=3), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=5), vals[12].timestamp) + + # should not go to server + + vals =
session.time_series_for(document_id, "BloodPressure").get( + base_line + timedelta(minutes=40), base_line + timedelta(minutes=45) + ) + self.assertEqual(1, session.advanced.number_of_requests) + + self.assertEqual(31, len(vals)) + + self.assertEqual(base_line + timedelta(minutes=40), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=45), vals[30].timestamp) + + # should not go to server + + vals = session.time_series_for(document_id, "Nasdaq").get( + base_line + timedelta(minutes=15), base_line + timedelta(minutes=25) + ) + self.assertEqual(1, session.advanced.number_of_requests) + + self.assertEqual(61, len(vals)) + + self.assertEqual(base_line + timedelta(minutes=15), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=25), vals[60].timestamp) + + def test_include_time_series_and_merge_with_existing_ranges_in_cache(self): + with self.store.open_session() as session: + session.store(User(name="Gracjan"), document_id) + session.save_changes() + + with self.store.open_session() as session: + tsf = session.time_series_for(document_id, ts_name1) + for i in range(360): + tsf.append_single(base_line + timedelta(seconds=10 * i), 6, "watches/fitbit") + + session.save_changes() + + with self.store.open_session() as session: + vals = session.time_series_for(document_id, ts_name1).get( + base_line + timedelta(minutes=2), base_line + timedelta(minutes=10) + ) + self.assertEqual(1, session.advanced.number_of_requests) + self.assertEqual(49, len(vals)) + self.assertEqual(base_line + timedelta(minutes=2), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=10), vals[48].timestamp) + + user = session.load( + document_id, + User, + lambda i: i.include_time_series( + ts_name1, base_line + timedelta(minutes=40), base_line + timedelta(minutes=50) + ), + ) + self.assertEqual(2, session.advanced.number_of_requests) + + # should not go to server + vals = session.time_series_for(document_id, ts_name1).get( + base_line + timedelta(minutes=40), base_line + timedelta(minutes=50) + ) + self.assertEqual(2, session.advanced.number_of_requests) + + self.assertEqual(61, len(vals)) + + self.assertEqual(base_line + timedelta(minutes=40), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=50), vals[60].timestamp) + + cache = session.time_series_by_doc_id.get(document_id) + self.assertIsNotNone(cache) + ranges = cache.get(ts_name1) + self.assertEqual(2, len(ranges)) + + self.assertEqual(base_line + timedelta(minutes=2), ranges[0].from_date) + self.assertEqual(base_line + timedelta(minutes=10), ranges[0].to_date) + self.assertEqual(base_line + timedelta(minutes=40), ranges[1].from_date) + self.assertEqual(base_line + timedelta(minutes=50), ranges[1].to_date) + + # we intentionally evict just the document (without it's TS data), + # so that Load request will go to server + + session.documents_by_entity.evict(user) + session.documents_by_id.remove(document_id) + + # should go to server to get [0, 2] and merge it into existing [2, 10] + user = session.load( + document_id, + User, + lambda i: i.include_time_series(ts_name1, base_line, base_line + timedelta(minutes=2)), + ) + + self.assertEqual(3, session.advanced.number_of_requests) + + # should not go to server + + vals = session.time_series_for(document_id, ts_name1).get(base_line, base_line + timedelta(minutes=2)) + + self.assertEqual(3, session.advanced.number_of_requests) + + self.assertEqual(13, len(vals)) + self.assertEqual(base_line, vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=2), 
vals[12].timestamp) + + self.assertEqual(2, len(ranges)) + self.assertEqual(base_line + timedelta(minutes=0), ranges[0].from_date) + self.assertEqual(base_line + timedelta(minutes=10), ranges[0].to_date) + self.assertEqual(base_line + timedelta(minutes=40), ranges[1].from_date) + self.assertEqual(base_line + timedelta(minutes=50), ranges[1].to_date) + + # evict just the document + session.documents_by_entity.evict(user) + session.documents_by_id.remove(document_id) + + # should go to server to get [10, 16] and merge it into existing [0, 10] + user = session.load( + document_id, + User, + lambda i: i.include_time_series( + ts_name1, base_line + timedelta(minutes=10), base_line + timedelta(minutes=16) + ), + ) + + self.assertEqual(4, session.advanced.number_of_requests) + + # should not go to server + vals = session.time_series_for(document_id, ts_name1).get( + base_line + timedelta(minutes=10), base_line + timedelta(minutes=16) + ) + self.assertEqual(4, session.advanced.number_of_requests) + + self.assertEqual(37, len(vals)) + self.assertEqual(base_line + timedelta(minutes=10), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=16), vals[36].timestamp) + + self.assertEqual(2, len(ranges)) + + self.assertEqual(base_line, ranges[0].from_date) + self.assertEqual(base_line + timedelta(minutes=16), ranges[0].to_date) + self.assertEqual(base_line + timedelta(minutes=40), ranges[1].from_date) + self.assertEqual(base_line + timedelta(minutes=50), ranges[1].to_date) + + # evict just the document + session.documents_by_entity.evict(user) + session.documents_by_id.remove(document_id) + + # should go to server to get range [17, 19] + # and add it to cache in between [10, 16] and [40, 50] + + user = session.load( + document_id, + User, + lambda i: i.include_time_series( + ts_name1, base_line + timedelta(minutes=17), base_line + timedelta(minutes=19) + ), + ) + + self.assertEqual(5, session.advanced.number_of_requests) + + # should not go to server + + vals = session.time_series_for(document_id, ts_name1).get( + base_line + timedelta(minutes=17), base_line + timedelta(minutes=19) + ) + + self.assertEqual(5, session.advanced.number_of_requests) + + self.assertEqual(13, len(vals)) + self.assertEqual(base_line + timedelta(minutes=17), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=19), vals[12].timestamp) + + self.assertEqual(3, len(ranges)) + self.assertEqual(base_line, ranges[0].from_date) + self.assertEqual(base_line + timedelta(minutes=16), ranges[0].to_date) + self.assertEqual(base_line + timedelta(minutes=17), ranges[1].from_date) + self.assertEqual(base_line + timedelta(minutes=19), ranges[1].to_date) + self.assertEqual(base_line + timedelta(minutes=40), ranges[2].from_date) + self.assertEqual(base_line + timedelta(minutes=50), ranges[2].to_date) + + # evict just the document + session.documents_by_entity.evict(user) + session.documents_by_id.remove(document_id) + + # should go to server to get range [19, 40] + # and merge the result with existing ranges [17, 19] and [40, 50] + # into single range [17, 50] + + user = session.load( + document_id, + User, + lambda i: i.include_time_series( + ts_name1, base_line + timedelta(minutes=18), base_line + timedelta(minutes=48) + ), + ) + + self.assertEqual(6, session.advanced.number_of_requests) + + # should not go to server + + vals = session.time_series_for(document_id, ts_name1).get( + base_line + timedelta(minutes=18), base_line + timedelta(minutes=48) + ) + + self.assertEqual(6, session.advanced.number_of_requests) + 
+ self.assertEqual(181, len(vals)) + self.assertEqual(base_line + timedelta(minutes=18), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=48), vals[180].timestamp) + + self.assertEqual(2, len(ranges)) + self.assertEqual(base_line, ranges[0].from_date) + self.assertEqual(base_line + timedelta(minutes=16), ranges[0].to_date) + self.assertEqual(base_line + timedelta(minutes=17), ranges[1].from_date) + self.assertEqual(base_line + timedelta(minutes=50), ranges[1].to_date) + + # evict just the document + session.documents_by_entity.evict(user) + session.documents_by_id.remove(document_id) + + # should go to server to get range [12, 22] + # and merge the result with existing ranges [0, 16] and [17, 50] + # into single range [0, 50] + + user = session.load( + document_id, + User, + lambda i: i.include_time_series( + ts_name1, base_line + timedelta(minutes=12), base_line + timedelta(minutes=22) + ), + ) + + self.assertEqual(7, session.advanced.number_of_requests) + + # should not go to server + + vals = session.time_series_for(document_id, ts_name1).get( + base_line + timedelta(minutes=12), base_line + timedelta(minutes=22) + ) + + self.assertEqual(7, session.advanced.number_of_requests) + + self.assertEqual(61, len(vals)) + self.assertEqual(base_line + timedelta(minutes=12), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=22), vals[60].timestamp) + + self.assertEqual(1, len(ranges)) + self.assertEqual(base_line, ranges[0].from_date) + self.assertEqual(base_line + timedelta(minutes=50), ranges[0].to_date) + + # evict just the document + session.documents_by_entity.evict(user) + session.documents_by_id.remove(document_id) + + # should go to server to get range [50, ∞] + # and merge the result with existing range [0, 50] into single range [0, ∞] + + user = session.load( + document_id, + User, + lambda i: i.include_time_series_by_range_type_and_time( + ts_name1, TimeSeriesRangeType.LAST, TimeValue.of_minutes(10) + ), + ) + + self.assertEqual(8, session.advanced.number_of_requests) + + # should not go to server + + vals = session.time_series_for(document_id, ts_name1).get(base_line + timedelta(minutes=50), None) + + self.assertEqual(8, session.advanced.number_of_requests) + + self.assertEqual(60, len(vals)) + + self.assertEqual(base_line + timedelta(minutes=50), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=59, seconds=50), vals[59].timestamp) + + self.assertEqual(1, len(ranges)) + self.assertEqual(base_line, ranges[0].from_date) + self.assertEqual(None, ranges[0].to_date) + + def test_query_with_include_time_series(self): + with self.store.open_session() as session: + session.store(User(name="Gracjan"), document_id) + session.save_changes() + + with self.store.open_session() as session: + tsf = session.time_series_for(document_id, ts_name1) + + for i in range(360): + tsf.append_single(base_line + timedelta(seconds=i * 10), 67, tag1) + + session.save_changes() + + with self.store.open_session() as session: + query = session.query(object_type=User).include(lambda i: i.include_time_series(ts_name1)) + + result = list(query) + + self.assertEqual(1, session.advanced.number_of_requests) + + self.assertEqual("Gracjan", result[0].name) + + # should not go to server + + vals = session.time_series_for(document_id, ts_name1).get(base_line, base_line +
timedelta(minutes=30)) + + self.assertEqual(1, session.advanced.number_of_requests) + + self.assertEqual(181, len(vals)) + self.assertEqual(base_line, vals[0].timestamp) + self.assertEqual(tag1, vals[0].tag) + self.assertEqual(67, vals[0].values[0]) + self.assertEqual(base_line + timedelta(minutes=30), vals[180].timestamp) + + def test_can_load_async_with_include_time_series_last_range_by_count(self): + with self.store.open_session() as session: + session.store(Company(name="HR"), company_id) + session.store(Order(company=company_id), order_id) + tsf = session.time_series_for(order_id, ts_name1) + + for i in range(15): + tsf.append_single(base_line - timedelta(minutes=i), i, tag1) + + session.save_changes() + + with self.store.open_session() as session: + order = session.load( + order_id, + Order, + lambda i: i.include_documents("company").include_time_series_by_range_type_and_count( + ts_name1, TimeSeriesRangeType.LAST, 11 + ), + ) + self.assertEqual(1, session.advanced.number_of_requests) + + # should not go to server + + company = session.load(order.company, Company) + + self.assertEqual(1, session.advanced.number_of_requests) + self.assertEqual("HR", company.name) + + # should not go to server + values = session.time_series_for(order_id, ts_name1).get(base_line - timedelta(minutes=10), None) + + self.assertEqual(1, session.advanced.number_of_requests) + self.assertEqual(11, len(values)) + + for i in range(len(values)): + self.assertEqual(1, len(values[i].values)) + self.assertEqual(len(values) - 1 - i, values[i].values[0]) + self.assertEqual(tag1, values[i].tag) + self.assertEqual(base_line - timedelta(minutes=len(values) - 1 - i), values[i].timestamp) + + def test_can_load_async_with_include_array_of_time_series_last_range_by_time(self): + self.can_load_async_with_include_array_of_time_series_last_range(True) + + def test_can_load_async_with_include_array_of_time_series_last_range_by_count(self): + self.can_load_async_with_include_array_of_time_series_last_range(False) + + def can_load_async_with_include_array_of_time_series_last_range(self, by_time: bool) -> None: + with self.store.open_session() as session: + session.store(Company(name="HR"), company_id) + session.store(Order(company=company_id), order_id) + + tsf = session.time_series_for(order_id, ts_name1) + tsf.append_single(base_line, 67, tag2) + tsf.append_single(base_line - timedelta(minutes=5), 64, tag2) + tsf.append_single(base_line - timedelta(minutes=10), 65, tag1) + + tsf2 = session.time_series_for(order_id, ts_name2) + tsf2.append_single(base_line - timedelta(minutes=15), 6, tag3) + tsf2.append_single(base_line - timedelta(minutes=10), 7, tag3) + tsf2.append_single(base_line - timedelta(minutes=9), 7, tag3) + tsf2.append_single(base_line - timedelta(minutes=8), 6, tag3) + + session.save_changes() + + with self.store.open_session() as session: + order = ( + session.load( + order_id, + Order, + lambda i: i.include_documents("company").include_array_of_time_series_by_range_type_and_time( + [ts_name1, ts_name2], TimeSeriesRangeType.LAST, TimeValue.of_minutes(10) + ), + ) + if by_time + else session.load( + order_id, + Order, + lambda i: i.include_documents("company").include_array_of_time_series_by_range_type_and_count( + [ts_name1, ts_name2], TimeSeriesRangeType.LAST, 3 + ), + ) + ) + + self.assertEqual(1, session.advanced.number_of_requests) + + # should not go to server + company = session.load(order.company, Company) + + self.assertEqual(1, session.advanced.number_of_requests) + self.assertEqual("HR", company.name) + + 
# should not go to server + heartrate_values = session.time_series_for_entity(order, ts_name1).get( + base_line - timedelta(minutes=10), None + ) + self.assertEqual(1, session.advanced.number_of_requests) + self.assertEqual(3, len(heartrate_values)) + + self.assertEqual(1, len(heartrate_values[0].values)) + self.assertEqual(65, heartrate_values[0].values[0]) + self.assertEqual(tag1, heartrate_values[0].tag) + self.assertEqual(base_line - timedelta(minutes=10), heartrate_values[0].timestamp) + + self.assertEqual(1, len(heartrate_values[1].values)) + self.assertEqual(64, heartrate_values[1].values[0]) + self.assertEqual(tag2, heartrate_values[1].tag) + self.assertEqual(base_line - timedelta(minutes=5), heartrate_values[1].timestamp) + + self.assertEqual(1, len(heartrate_values[2].values)) + self.assertEqual(67, heartrate_values[2].values[0]) + self.assertEqual(tag2, heartrate_values[2].tag) + self.assertEqual(base_line, heartrate_values[2].timestamp) + + # should not go to server + speedrate_values = session.time_series_for_entity(order, ts_name2).get( + base_line - timedelta(minutes=10), None + ) + + self.assertEqual(1, session.advanced.number_of_requests) + self.assertEqual(3, len(speedrate_values)) + + self.assertEqual(1, len(speedrate_values[0].values)) + self.assertEqual(7, speedrate_values[0].values[0]) + self.assertEqual(tag3, speedrate_values[0].tag) + self.assertEqual(base_line - timedelta(minutes=10), speedrate_values[0].timestamp) + + self.assertEqual(1, len(speedrate_values[1].values)) + self.assertEqual(7, speedrate_values[1].values[0]) + self.assertEqual(tag3, speedrate_values[1].tag) + self.assertEqual(base_line - timedelta(minutes=9), speedrate_values[1].timestamp) + + self.assertEqual(1, len(speedrate_values[2].values)) + self.assertEqual(6, speedrate_values[2].values[0]) + self.assertEqual(tag3, speedrate_values[2].tag) + self.assertEqual(base_line - timedelta(minutes=8), speedrate_values[2].timestamp) + + def test_can_load_async_with_include_all_time_series_last_range_by_count(self): + with self.store.open_session() as session: + session.store(Company(name="HR"), company_id) + session.store(Order(company=company_id), order_id) + tsf = session.time_series_for(order_id, ts_name1) + for i in range(15): + tsf.append_single(base_line - timedelta(minutes=i), i, tag1) + tsf2 = session.time_series_for(order_id, ts_name2) + for i in range(15): + tsf2.append_single(base_line - timedelta(minutes=i), i, tag1) + + session.save_changes() + + with self.store.open_session() as session: + order = session.load( + order_id, + Order, + lambda i: i.include_documents("company").include_all_time_series_by_count(TimeSeriesRangeType.LAST, 11), + ) + + self.assertEqual(1, session.advanced.number_of_requests) + + # should not go to server + company = session.load(order.company, Company) + + self.assertEqual(1, session.advanced.number_of_requests) + + self.assertEqual("HR", company.name) + + # should not go to server + heartrate_values = session.time_series_for_entity(order, ts_name1).get( + base_line - timedelta(minutes=10), None + ) + self.assertEqual(11, len(heartrate_values)) + self.assertEqual(1, session.advanced.number_of_requests) + + speedrate_values = session.time_series_for_entity(order, ts_name2).get( + base_line - timedelta(minutes=10), None + ) + + self.assertEqual(11, len(speedrate_values)) + self.assertEqual(1, session.advanced.number_of_requests) + + for i in range(len(heartrate_values)): + self.assertEqual(1, len(heartrate_values[i].values)) + self.assertEqual(len(heartrate_values) - 
1 - i, heartrate_values[i].values[0]) + self.assertEqual(tag1, heartrate_values[i].tag) + self.assertEqual( + base_line - timedelta(minutes=len(heartrate_values) - 1 - i), heartrate_values[i].timestamp + ) + + for i in range(len(speedrate_values)): + self.assertEqual(1, len(speedrate_values[i].values)) + self.assertEqual(len(speedrate_values) - 1 - i, speedrate_values[i].values[0]) + self.assertEqual(tag1, speedrate_values[i].tag) + self.assertEqual( + base_line - timedelta(minutes=len(speedrate_values) - 1 - i), speedrate_values[i].timestamp + ) + + def test_can_load_async_with_include_all_time_series_last_range_by_type(self): + with self.store.open_session() as session: + session.store(Company(name="HR"), company_id) + session.store(Order(company=company_id), order_id) + tsf = session.time_series_for(order_id, ts_name1) + for i in range(15): + tsf.append_single(base_line - timedelta(minutes=i), i, tag1) + tsf2 = session.time_series_for(order_id, ts_name2) + for i in range(15): + tsf2.append_single(base_line - timedelta(minutes=i), i, tag1) + + session.save_changes() + + with self.store.open_session() as session: + order = session.load( + order_id, + Order, + lambda i: i.include_documents("company").include_all_time_series_by_time( + TimeSeriesRangeType.LAST, TimeValue.of_minutes(10) + ), + ) + + # should not go to server + company = session.load(order.company, Company) + + self.assertEqual(1, session.advanced.number_of_requests) + + self.assertEqual("HR", company.name) + + # should not go to server + heartrate_values = session.time_series_for_entity(order, ts_name1).get( + base_line - timedelta(minutes=10), None + ) + self.assertEqual(11, len(heartrate_values)) + self.assertEqual(1, session.advanced.number_of_requests) + + speedrate_values = session.time_series_for_entity(order, ts_name2).get( + base_line - timedelta(minutes=10), None + ) + + self.assertEqual(11, len(speedrate_values)) + self.assertEqual(1, session.advanced.number_of_requests) + + for i in range(len(heartrate_values)): + self.assertEqual(1, len(heartrate_values[i].values)) + self.assertEqual(len(heartrate_values) - 1 - i, heartrate_values[i].values[0]) + self.assertEqual(tag1, heartrate_values[i].tag) + self.assertEqual( + base_line - timedelta(minutes=len(heartrate_values) - 1 - i), heartrate_values[i].timestamp + ) + + for i in range(len(speedrate_values)): + self.assertEqual(1, len(speedrate_values[i].values)) + self.assertEqual(len(speedrate_values) - 1 - i, speedrate_values[i].values[0]) + self.assertEqual(tag1, speedrate_values[i].tag) + self.assertEqual( + base_line - timedelta(minutes=len(speedrate_values) - 1 - i), speedrate_values[i].timestamp + ) + + def test_multi_load_with_include_time_series(self): + with self.store.open_session() as session: + session.store(User(name="Oren"), "users/ayende") + session.store(User(name="Gracjan"), "users/gracjan") + + session.save_changes() + + with self.store.open_session() as session: + tsf1 = session.time_series_for("users/ayende", ts_name1) + tsf2 = session.time_series_for("users/gracjan", ts_name1) + + for i in range(360): + tsf1.append_single(base_line + timedelta(seconds=i * 10), 6, tag1) + + if i % 2 == 0: + tsf2.append_single(base_line + timedelta(seconds=i * 10), 7, tag1) + + session.save_changes() + + with self.store.open_session() as session: + users = session.load( + ["users/ayende", "users/gracjan"], + User, + lambda i: i.include_time_series(ts_name1, base_line, base_line + timedelta(minutes=30)), + ) + + self.assertEqual(1, session.advanced.number_of_requests) 
+ + self.assertEqual("Oren", users.get("users/ayende").name) + self.assertEqual("Gracjan", users.get("users/gracjan").name) + + # should not go to server + + vals = session.time_series_for("users/ayende", ts_name1).get(base_line, base_line + timedelta(minutes=30)) + + self.assertEqual(1, session.advanced.number_of_requests) + + self.assertEqual(181, len(vals)) + + self.assertEqual(base_line, vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=30), vals[180].timestamp) + + # should not go to server + + vals = session.time_series_for("users/gracjan", ts_name1).get(base_line, base_line + timedelta(minutes=30)) + self.assertEqual(1, session.advanced.number_of_requests) + + self.assertEqual(91, len(vals)) + self.assertEqual(base_line, vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=30), vals[90].timestamp) diff --git a/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_ranges_cache.py b/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_ranges_cache.py new file mode 100644 index 00000000..c80ea43e --- /dev/null +++ b/ravendb/tests/jvm_migrated_tests/client_tests/time_series_tests/test_time_series_ranges_cache.py @@ -0,0 +1,75 @@ +from datetime import datetime, timedelta + +from ravendb import InMemoryDocumentSessionOperations +from ravendb.tests.test_base import TestBase, User + + +class TestTimeSeriesRangesCache(TestBase): + def setUp(self): + super().setUp() + + def test_should_get_partial_range_from_cache(self): + base_line = datetime(2023, 8, 20, 21, 30) + doc_id = "users/ayende" + ts_name = "Heartrate" + + with self.store.open_session() as session: + user = User(name="Oren") + session.store(user, doc_id) + session.time_series_for(doc_id, ts_name).append(base_line + timedelta(minutes=1), [59], "watches/fitbit") + session.save_changes() + + with self.store.open_session() as session: + val = session.time_series_for(doc_id, ts_name).get()[0] + + self.assertEqual([59], val.values) + self.assertEqual("watches/fitbit", val.tag) + self.assertEqual(base_line + timedelta(minutes=1), val.timestamp) + + self.assertEqual(1, session.advanced.number_of_requests) + + # should load from cache + val = session.time_series_for(doc_id, ts_name).get(base_line, base_line + timedelta(days=1))[0] + + self.assertEqual([59], val.values) + self.assertEqual("watches/fitbit", val.tag) + self.assertEqual(base_line + timedelta(minutes=1), val.timestamp) + + self.assertEqual(1, session.advanced.number_of_requests) + + in_memory_session: InMemoryDocumentSessionOperations = session + + cache = in_memory_session.time_series_by_doc_id[doc_id] + self.assertIsNotNone(cache) + + ranges = cache.get(ts_name) + self.assertIsNotNone(ranges) + self.assertEqual(1, len(ranges)) + + def test_should_get_time_series_value_from_cache(self): + base_line = datetime(2023, 8, 20, 21, 30) + doc_id = "users/ayende" + ts_name = "Heartrate" + + with self.store.open_session() as session: + user = User(name="Oren") + session.store(user, doc_id) + session.time_series_for(doc_id, ts_name).append(base_line + timedelta(minutes=1), [59], "watches/fitbit") + session.save_changes() + + with self.store.open_session() as session: + val = session.time_series_for("users/ayende", ts_name).get()[0] + + self.assertEqual([59], val.values) + self.assertEqual("watches/fitbit", val.tag) + self.assertEqual(base_line + timedelta(minutes=1), val.timestamp) + + self.assertEqual(1, session.advanced.number_of_requests) + + # should load from cache + val = 
session.time_series_for(doc_id, ts_name).get()[0] + self.assertEqual([59], val.values) + self.assertEqual("watches/fitbit", val.tag) + self.assertEqual(base_line + timedelta(minutes=1), val.timestamp) + + self.assertEqual(1, session.advanced.number_of_requests) diff --git a/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_14164.py b/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_14164.py new file mode 100644 index 00000000..c19c1734 --- /dev/null +++ b/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_14164.py @@ -0,0 +1,335 @@ +from datetime import datetime, timedelta +from typing import Optional + +from ravendb.documents.session.loaders.include import TimeSeriesIncludeBuilder +from ravendb.infrastructure.entities import User +from ravendb.tests.test_base import TestBase + + +document_id = "users/gracjan" +company_id = "companies/1-A" +order_id = "orders/1-A" +base_line = datetime(2023, 8, 20, 21, 30) +ts_name1 = "Heartrate" +ts_name2 = "Speedrate" +tag1 = "watches/fitbit" +tag2 = "watches/apple" +tag3 = "watches/sony" + + +class Watch: + def __init__(self, name: Optional[str] = None, accuracy: Optional[float] = None): + self.name = name + self.accuracy = accuracy + + +class TestRavenDB14164(TestBase): + def setUp(self): + super(TestRavenDB14164, self).setUp() + + def test_can_get_time_series_with_include_tag_documents(self): + tags = [tag1, tag2, tag3] + with self.store.open_session() as session: + session.store(User(), document_id) + + session.store(Watch("FitBit", 0.855), tags[0]) + session.store(Watch("Apple", 0.9), tags[1]) + session.store(Watch("Sony", 0.78), tags[2]) + session.save_changes() + + with self.store.open_session() as session: + tsf = session.time_series_for(document_id, ts_name1) + for i in range(121): + tsf.append_single(base_line + timedelta(minutes=i), i, tags[i % 3]) + session.save_changes() + + with self.store.open_session() as session: + get_results = session.time_series_for(document_id, ts_name1).get_include( + base_line, base_line + timedelta(hours=2), lambda i: i.include_tags() + ) + + self.assertEqual(1, session.advanced.number_of_requests) + + self.assertEqual(121, len(get_results)) + self.assertEqual(base_line, get_results[0].timestamp) + self.assertEqual(base_line + timedelta(hours=2), get_results[-1].timestamp) + + # should not go to server + tag_documents = session.load(tags, Watch) + self.assertEqual(1, session.advanced.number_of_requests) + + # assert tag documents + + self.assertEqual(3, len(tag_documents)) + + tag_doc = tag_documents.get("watches/fitbit") + self.assertEqual("FitBit", tag_doc.name) + self.assertEqual(0.855, tag_doc.accuracy) + + tag_doc = tag_documents.get("watches/apple") + self.assertEqual("Apple", tag_doc.name) + self.assertEqual(0.9, tag_doc.accuracy) + + tag_doc = tag_documents.get("watches/sony") + self.assertEqual("Sony", tag_doc.name) + self.assertEqual(0.78, tag_doc.accuracy) + + def test_can_get_time_series_with_include_tags_and_parent_document(self): + tags = [tag1, tag2, tag3] + with self.store.open_session() as session: + session.store(User(name="poisson"), document_id) + session.store(Watch("FitBit", 0.855), tags[0]) + session.store(Watch("Apple", 0.9), tags[1]) + session.store(Watch("Sony", 0.78), tags[2]) + session.save_changes() + + with self.store.open_session() as session: + tsf = session.time_series_for(document_id, ts_name1) + for i in range(121): + tsf.append_single(base_line + timedelta(minutes=i), i, tags[i % 3]) + session.save_changes() + + with self.store.open_session() as 
session: + get_results = session.time_series_for(document_id, ts_name1).get_include( + base_line, base_line + timedelta(hours=2), lambda i: i.include_tags().include_document() + ) + + self.assertEqual(1, session.advanced.number_of_requests) + + self.assertEqual(121, len(get_results)) + self.assertEqual(base_line, get_results[0].timestamp) + self.assertEqual(base_line + timedelta(hours=2), get_results[-1].timestamp) + + # should not go to server + user = session.load(document_id, User) + self.assertEqual(1, session.advanced.number_of_requests) + self.assertEqual("poisson", user.name) + + # should not go to server + tag_documents = session.load(tags, Watch) + self.assertEqual(1, session.advanced.number_of_requests) + + # assert tag documents + + self.assertEqual(3, len(tag_documents)) + + tag_doc = tag_documents.get("watches/fitbit") + self.assertEqual("FitBit", tag_doc.name) + self.assertEqual(0.855, tag_doc.accuracy) + + tag_doc = tag_documents.get("watches/apple") + self.assertEqual("Apple", tag_doc.name) + self.assertEqual(0.9, tag_doc.accuracy) + + tag_doc = tag_documents.get("watches/sony") + self.assertEqual("Sony", tag_doc.name) + self.assertEqual(0.78, tag_doc.accuracy) + + def test_includes_should_affect_time_series_get_command_etag(self): + tags = [tag1, tag2, tag3] + with self.store.open_session() as session: + session.store(User(name="poisson"), document_id) + session.store(Watch("FitBit", 0.855), tags[0]) + session.store(Watch("Apple", 0.9), tags[1]) + session.store(Watch("Sony", 0.78), tags[2]) + session.save_changes() + + with self.store.open_session() as session: + tsf = session.time_series_for(document_id, ts_name1) + for i in range(121): + tsf.append_single(base_line + timedelta(minutes=i), i, tags[i % 3]) + session.save_changes() + + with self.store.open_session() as session: + get_results = session.time_series_for(document_id, ts_name1).get_include( + base_line, base_line + timedelta(hours=2), lambda i: i.include_tags() + ) + + self.assertEqual(1, session.advanced.number_of_requests) + + self.assertEqual(121, len(get_results)) + self.assertEqual(base_line, get_results[0].timestamp) + self.assertEqual(base_line + timedelta(hours=2), get_results[-1].timestamp) + + # should not go to server + tag_documents = session.load(tags, Watch) + self.assertEqual(1, session.advanced.number_of_requests) + + # assert tag documents + + self.assertEqual(3, len(tag_documents)) + + tag_doc = tag_documents.get("watches/fitbit") + self.assertEqual("FitBit", tag_doc.name) + self.assertEqual(0.855, tag_doc.accuracy) + + tag_doc = tag_documents.get("watches/apple") + self.assertEqual("Apple", tag_doc.name) + self.assertEqual(0.9, tag_doc.accuracy) + + tag_doc = tag_documents.get("watches/sony") + self.assertEqual("Sony", tag_doc.name) + self.assertEqual(0.78, tag_doc.accuracy) + + with self.store.open_session() as session: + # update tags[0] + watch = session.load(tags[0], Watch) + watch.accuracy += 0.05 + session.save_changes() + + with self.store.open_session() as session: + get_results = session.time_series_for(document_id, ts_name1).get_include( + base_line, base_line + timedelta(hours=2), lambda i: i.include_tags() + ) + + self.assertEqual(1, session.advanced.number_of_requests) + + self.assertEqual(121, len(get_results)) + self.assertEqual(base_line, get_results[0].timestamp) + self.assertEqual(base_line + timedelta(hours=2), get_results[-1].timestamp) + # should not go to server + + tag_documents = session.load(tags, Watch) + self.assertEqual(1, session.advanced.number_of_requests) + + # 
assert tag documents + + self.assertEqual(3, len(tag_documents)) + + tag_doc = tag_documents.get("watches/fitbit") + self.assertEqual("FitBit", tag_doc.name) + self.assertEqual(0.905, tag_doc.accuracy) + + new_tag = "watches/google" + + with self.store.open_session() as session: + session.store(Watch("Google Watch", 0.75), new_tag) + # update a time series entry to have the new tag + + session.time_series_for(document_id, ts_name1).append_single(base_line + timedelta(minutes=45), 90, new_tag) + session.save_changes() + + with self.store.open_session() as session: + get_results = session.time_series_for(document_id, ts_name1).get_include( + base_line, base_line + timedelta(hours=2), lambda i: i.include_tags() + ) + self.assertEqual(1, session.advanced.number_of_requests) + + self.assertEqual(121, len(get_results)) + self.assertEqual(base_line, get_results[0].timestamp) + self.assertEqual(base_line + timedelta(hours=2), get_results[-1].timestamp) + + # should not go to server + tag_documents = session.load(tags, Watch) + self.assertEqual(1, session.advanced.number_of_requests) + + # assert that new tag is in cache + watch = session.load(new_tag, Watch) + self.assertEqual(1, session.advanced.number_of_requests) + + self.assertEqual("Google Watch", watch.name) + self.assertEqual(0.75, watch.accuracy) + + def test_can_get_time_series_with_include_cache_not_empty(self): + tags = [tag1, tag2, tag3] + with self.store.open_session() as session: + session.store(User(name="poisson"), document_id) + session.store(Watch("FitBit", 0.855), tags[0]) + session.store(Watch("Apple", 0.9), tags[1]) + session.store(Watch("Sony", 0.78), tags[2]) + session.save_changes() + + with self.store.open_session() as session: + tsf = session.time_series_for(document_id, ts_name1) + for i in range(121): + tag = tags[0 if i < 60 else 1 if i < 90 else 2] + tsf.append_single(base_line + timedelta(minutes=i), i, tag) + + session.save_changes() + + with self.store.open_session() as session: + # get [21:30 - 22:30] + get_results = session.time_series_for(document_id, ts_name1).get(base_line, base_line + timedelta(hours=1)) + + self.assertEqual(61, len(get_results)) + self.assertEqual(base_line, get_results[0].timestamp) + self.assertEqual(base_line + timedelta(hours=1), get_results[-1].timestamp) + + # get [22:45 - 23:30] with includes + get_results = session.time_series_for(document_id, ts_name1).get_include( + base_line + timedelta(minutes=75), base_line + timedelta(hours=2), TimeSeriesIncludeBuilder.include_tags + ) + + self.assertEqual(2, session.advanced.number_of_requests) + + self.assertEqual(46, len(get_results)) + self.assertEqual(base_line + timedelta(minutes=75), get_results[0].timestamp) + self.assertEqual(base_line + timedelta(hours=2), get_results[-1].timestamp) + + # should not go to server + + tags_documents = session.load(tags[1:3], Watch) + self.assertEqual(2, session.advanced.number_of_requests) + + # assert tag documents + self.assertEqual(2, len(tags_documents)) + + tag_doc = tags_documents.get("watches/apple") + self.assertEqual("Apple", tag_doc.name) + self.assertEqual(0.9, tag_doc.accuracy) + + tag_doc = tags_documents.get("watches/sony") + self.assertEqual("Sony", tag_doc.name) + self.assertEqual(0.78, tag_doc.accuracy) + + # watches/fitbit should not be in cache + watch = session.load(tags[0], Watch) + self.assertEqual(3, session.advanced.number_of_requests) + self.assertEqual("FitBit", watch.name) + self.assertEqual(0.855, watch.accuracy) + + def 
test_can_get_time_series_with_include_tags_when_not_all_entries_have_tags(self): + tags = [tag1, tag2, tag3] + with self.store.open_session() as session: + session.store(User(name="poisson"), document_id) + session.store(Watch("FitBit", 0.855), tags[0]) + session.store(Watch("Apple", 0.9), tags[1]) + session.store(Watch("Sony", 0.78), tags[2]) + session.save_changes() + + with self.store.open_session() as session: + tsf = session.time_series_for(document_id, ts_name1) + for i in range(121): + tsf.append_single(base_line + timedelta(minutes=i), i, None if i % 10 == 0 else tags[i % 3])  # leave every 10th entry untagged + session.save_changes() + + with self.store.open_session() as session: + get_results = session.time_series_for(document_id, ts_name1).get_include( + base_line, base_line + timedelta(hours=2), lambda i: i.include_tags() + ) + + self.assertEqual(1, session.advanced.number_of_requests) + + self.assertEqual(121, len(get_results)) + self.assertEqual(base_line, get_results[0].timestamp) + self.assertEqual(base_line + timedelta(hours=2), get_results[-1].timestamp) + + # should not go to server + tag_documents = session.load(tags, Watch) + self.assertEqual(1, session.advanced.number_of_requests) + + # assert tag documents + + self.assertEqual(3, len(tag_documents)) + + tag_doc = tag_documents.get("watches/fitbit") + self.assertEqual("FitBit", tag_doc.name) + self.assertEqual(0.855, tag_doc.accuracy) + + tag_doc = tag_documents.get("watches/apple") + self.assertEqual("Apple", tag_doc.name) + self.assertEqual(0.9, tag_doc.accuracy) + + tag_doc = tag_documents.get("watches/sony") + self.assertEqual("Sony", tag_doc.name) + self.assertEqual(0.78, tag_doc.accuracy) diff --git a/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_14994.py b/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_14994.py new file mode 100644 index 00000000..86d4b894 --- /dev/null +++ b/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_14994.py @@ -0,0 +1,47 @@ +from datetime import datetime, timedelta + +from ravendb.documents.operations.time_series import GetTimeSeriesOperation +from ravendb.infrastructure.entities import User +from ravendb.tests.test_base import TestBase + + +class TestRavenDB14994(TestBase): + def setUp(self): + super().setUp() + + def test_get_on_non_existing_time_series_should_return_null(self): + document_id = "users/gracjan" + with self.store.open_session() as session: + session.store(User(), document_id) + session.save_changes() + + get = self.store.operations.send(GetTimeSeriesOperation(document_id, "HeartRate")) + self.assertIsNone(get) + + with self.store.open_session() as session: + self.assertIsNone(session.time_series_for(document_id, "HeartRate").get()) + + def test_get_on_empty_range_should_return_empty_array(self): + document_id = "users/gracjan" + base_line = datetime(2023, 8, 20, 21, 30) + with self.store.open_session() as session: + session.store(User(), document_id) + + tsf = session.time_series_for(document_id, "HeartRate") + for i in range(10): + tsf.append_single(base_line + timedelta(minutes=i), i) + + session.save_changes() + + get = self.store.operations.send( + GetTimeSeriesOperation( + document_id, "HeartRate", base_line - timedelta(minutes=2), base_line - timedelta(minutes=1) + ) + ) + self.assertEqual(0, len(get.entries)) + + with self.store.open_session() as session: + result = session.time_series_for(document_id, "HeartRate").get( + base_line - timedelta(days=62), base_line - timedelta(days=31) + ) + self.assertEqual(0, len(result)) diff --git
a/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_15000.py b/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_15000.py new file mode 100644 index 00000000..2c09cb88 --- /dev/null +++ b/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_15000.py @@ -0,0 +1,59 @@ +from datetime import datetime + +from ravendb.infrastructure.orders import Order, Company +from ravendb.tests.test_base import TestBase + + +class TestRavenDB15000(TestBase): + def setUp(self): + super(TestRavenDB15000, self).setUp() + + def test_can_include_time_series_without_providing_from_and_to_dates_via_load(self): + with self.store.open_session() as session: + session.store(Order(company="companies/1-A"), "orders/1-A") + session.store(Company(name="HR"), "companies/1-A") + session.time_series_for("orders/1-A", "Heartrate").append_single(datetime(1999, 11, 14), 1) + session.save_changes() + + with self.store.open_session() as session: + order = session.load( + "orders/1-A", Order, lambda i: i.include_documents("company").include_time_series("Heartrate") + ) + + # should not go to server + company = session.load(order.company, Company) + + self.assertEqual(1, session.advanced.number_of_requests) + self.assertEqual("HR", company.name) + + # should not go to server + vals = session.time_series_for_entity(order, "Heartrate").get() + + self.assertEqual(1, session.advanced.number_of_requests) + self.assertEqual(1, len(vals)) + + def test_can_include_time_series_without_providing_from_and_to_dates_via_query(self): + with self.store.open_session() as session: + session.store(Order(company="companies/1-A"), "orders/1-A") + session.store(Company(name="HR"), "companies/1-A") + session.time_series_for("orders/1-A", "Heartrate").append_single(datetime(1999, 11, 14), 1) + session.save_changes() + + with self.store.open_session() as session: + order = ( + session.query(object_type=Order) + .include(lambda i: i.include_documents("company").include_time_series("Heartrate")) + .first() + ) + + # should not go to server + company = session.load(order.company, Company) + + self.assertEqual(1, session.advanced.number_of_requests) + self.assertEqual("HR", company.name) + + # should not go to server + vals = session.time_series_for_entity(order, "Heartrate").get() + + self.assertEqual(1, session.advanced.number_of_requests) + self.assertEqual(1, len(vals)) diff --git a/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_16060.py b/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_16060.py new file mode 100644 index 00000000..e3de619a --- /dev/null +++ b/ravendb/tests/jvm_migrated_tests/issues_tests/test_ravenDB_16060.py @@ -0,0 +1,400 @@ +from __future__ import annotations +from datetime import datetime, timedelta +from typing import Dict, Tuple, Optional + +from ravendb.documents.session.time_series import ITimeSeriesValuesBindable, TimeSeriesRangeType +from ravendb.infrastructure.entities import User +from ravendb.primitives.time_series import TimeValue +from ravendb.tests.test_base import TestBase + + +class HeartRateMeasure(ITimeSeriesValuesBindable): + def __init__(self, value: float): + self.heart_rate = value + + def get_time_series_mapping(self) -> Dict[int, Tuple[str, Optional[str]]]: + return {0: ("heart_rate", None)} + + +class TestRavenDB16060(TestBase): + def setUp(self): + super().setUp() + + def test_can_serve_time_series_from_cache_typed(self): + base_line = datetime(2023, 8, 20, 21, 30) + doc_id = "users/0x901507" + + with self.store.open_session() as session: + user = 
User(name="Marcelo") + session.store(user, doc_id) + + ts = session.typed_time_series_for(HeartRateMeasure, doc_id) + + ts.append(base_line, HeartRateMeasure(59), "watches/fitbit") + session.save_changes() + + with self.store.open_session() as session: + time_series = session.typed_time_series_for(HeartRateMeasure, doc_id).get() + + self.assertEqual(1, len(time_series)) + self.assertEqual(59, time_series[0].value.heart_rate) + + time_series_2 = session.typed_time_series_for(HeartRateMeasure, doc_id).get() + + self.assertEqual(1, len(time_series_2)) + self.assertEqual(59, time_series_2[0].value.heart_rate) + + self.assertEqual(1, session.advanced.number_of_requests) + + def test_include_time_series_and_update_existing_range_in_cache_typed(self): + base_line = datetime(2023, 8, 20, 21, 30) + doc_id = "users/0x901507" + + with self.store.open_session() as session: + user = User(name="Gracjan") + session.store(user, doc_id) + + session.save_changes() + + with self.store.open_session() as session: + tsf = session.typed_time_series_for(HeartRateMeasure, doc_id) + + for i in range(360): + tsf.append(base_line + timedelta(seconds=i * 10), HeartRateMeasure(6), "watches/fitbit") + + session.save_changes() + + with self.store.open_session() as session: + vals = session.typed_time_series_for(HeartRateMeasure, doc_id).get( + base_line + timedelta(minutes=2), base_line + timedelta(minutes=10) + ) + + self.assertEqual(1, session.advanced.number_of_requests) + + self.assertEqual(49, len(vals)) + + self.assertEqual(base_line + timedelta(minutes=2), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=10), vals[48].timestamp) + + singularity = base_line + timedelta(minutes=3, seconds=3) + + session.typed_time_series_for(HeartRateMeasure, doc_id).append( + singularity, HeartRateMeasure(6), "watches/fitbit" + ) + session.save_changes() + + self.assertEqual(2, session.advanced.number_of_requests) + + user = session.load( + doc_id, + User, + lambda i: i.include_time_series( + "heartRateMeasures", base_line + timedelta(minutes=3), base_line + timedelta(minutes=5) + ), + ) + + self.assertEqual(3, session.advanced.number_of_requests) + + # should not go to server + + vals = session.typed_time_series_for(HeartRateMeasure, doc_id).get( + base_line + timedelta(minutes=3), base_line + timedelta(minutes=5) + ) + + self.assertEqual(3, session.advanced.number_of_requests) + + self.assertEqual(14, len(vals)) + self.assertEqual(base_line + timedelta(minutes=3), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=3, seconds=3), vals[1].timestamp) + self.assertEqual(base_line + timedelta(minutes=5), vals[13].timestamp) + + def test_can_include_typed_time_series(self): + base_line = datetime(2023, 8, 20, 21, 30) + doc_id = "users/0x901507" + + with self.store.open_session() as session: + user = User(name="Gracjan") + session.store(user, doc_id) + ts = session.typed_time_series_for(HeartRateMeasure, doc_id) + ts.append(base_line, HeartRateMeasure(59), "watches/fitbit") + + session.save_changes() + + with self.store.open_session() as session: + items = list(session.query(object_type=User).include(lambda x: x.include_time_series("HeartRateMeasures"))) + + for item in items: + time_series = session.typed_time_series_for(HeartRateMeasure, item.Id, "HeartRateMeasures").get() + self.assertEqual(1, len(time_series)) + self.assertEqual(59, time_series[0].value.heart_rate) + + self.assertEqual(1, session.advanced.number_of_requests) + + def 
test_include_time_series_and_merge_with_existing_ranges_in_cache_typed(self): + base_line = datetime(2023, 8, 20, 21, 30) + doc_id = "users/0x901507" + + with self.store.open_session() as session: + user = User(name="Gracjan") + session.store(user, doc_id) + session.save_changes() + + with self.store.open_session() as session: + tsf = session.typed_time_series_for(HeartRateMeasure, doc_id) + + for i in range(360): + typed_measure = HeartRateMeasure(6) + tsf.append(base_line + timedelta(seconds=i * 10), typed_measure, "watches/fitbit") + + session.save_changes() + + with self.store.open_session() as session: + vals = session.typed_time_series_for(HeartRateMeasure, doc_id).get( + base_line + timedelta(minutes=2), base_line + timedelta(minutes=10) + ) + + self.assertEqual(1, session.advanced.number_of_requests) + + self.assertEqual(49, len(vals)) + self.assertEqual(base_line + timedelta(minutes=2), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=10), vals[48].timestamp) + + user = session.load( + doc_id, + User, + lambda i: i.include_time_series( + "heartRateMeasures", base_line + timedelta(minutes=40), base_line + timedelta(minutes=50) + ), + ) + + self.assertEqual(2, session.advanced.number_of_requests) + + # should not go to server + + vals = session.typed_time_series_for(HeartRateMeasure, doc_id).get( + base_line + timedelta(minutes=40), base_line + timedelta(minutes=50) + ) + + self.assertEqual(2, session.advanced.number_of_requests) + + self.assertEqual(61, len(vals)) + self.assertEqual(base_line + timedelta(minutes=40), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=50), vals[60].timestamp) + + cache = session.time_series_by_doc_id.get(doc_id, None) + self.assertIsNotNone(cache) + + ranges = cache.get("heartRateMeasures") + self.assertEqual(2, len(ranges)) + + self.assertEqual(base_line + timedelta(minutes=2), ranges[0].from_date) + self.assertEqual(base_line + timedelta(minutes=10), ranges[0].to_date) + self.assertEqual(base_line + timedelta(minutes=40), ranges[1].from_date) + self.assertEqual(base_line + timedelta(minutes=50), ranges[1].to_date) + + # we intentionally evict just the document (without its TS data), + # so that the load request will go to the server + + session.documents_by_entity.evict(user) + session.documents_by_id.remove(doc_id) + + # should go to server to get [0, 2] and merge it into existing [2, 10] + user = session.load( + doc_id, + User, + lambda i: i.include_time_series("heartRateMeasures", base_line, base_line + timedelta(minutes=2)), + ) + + self.assertEqual(3, session.advanced.number_of_requests) + + # should not go to server + + vals = session.typed_time_series_for(HeartRateMeasure, doc_id).get( + base_line, base_line + timedelta(minutes=2) + ) + self.assertEqual(3, session.advanced.number_of_requests) + + self.assertEqual(13, len(vals)) + self.assertEqual(base_line, vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=2), vals[12].timestamp) + + self.assertEqual(2, len(ranges)) + + self.assertEqual(base_line, ranges[0].from_date) + self.assertEqual(base_line + timedelta(minutes=10), ranges[0].to_date) + self.assertEqual(base_line + timedelta(minutes=40), ranges[1].from_date) + self.assertEqual(base_line + timedelta(minutes=50), ranges[1].to_date) + + # evict just the document + + session.documents_by_entity.evict(user) + session.documents_by_id.remove(doc_id) + + # should go to server to get [10, 16] and merge it into existing [0, 10] + user = session.load( + doc_id, + User, + lambda i:
i.include_time_series( + "heartRateMeasures", base_line + timedelta(minutes=10), base_line + timedelta(minutes=16) + ), + ) + + self.assertEqual(4, session.advanced.number_of_requests) + + # should not go to server + + vals = session.typed_time_series_for(HeartRateMeasure, doc_id).get( + base_line + timedelta(minutes=10), base_line + timedelta(minutes=16) + ) + + self.assertEqual(4, session.advanced.number_of_requests) + + self.assertEqual(37, len(vals)) + self.assertEqual(base_line + timedelta(minutes=10), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=16), vals[36].timestamp) + + self.assertEqual(2, len(ranges)) + + self.assertEqual(base_line, ranges[0].from_date) + self.assertEqual(base_line + timedelta(minutes=16), ranges[0].to_date) + self.assertEqual(base_line + timedelta(minutes=40), ranges[1].from_date) + self.assertEqual(base_line + timedelta(minutes=50), ranges[1].to_date) + + # evict just the document + session.documents_by_entity.evict(user) + session.documents_by_id.remove(doc_id) + + # should go to server to get range [17,19] + # and add it to cache between [10, 16] and [40, 50] + + user = session.load( + doc_id, + User, + lambda i: i.include_time_series( + "heartRateMeasures", base_line + timedelta(minutes=17), base_line + timedelta(minutes=19) + ), + ) + + self.assertEqual(5, session.advanced.number_of_requests) + + # should not go to server + + vals = session.typed_time_series_for(HeartRateMeasure, doc_id).get( + base_line + timedelta(minutes=17), base_line + timedelta(minutes=19) + ) + + self.assertEqual(5, session.advanced.number_of_requests) + + self.assertEqual(13, len(vals)) + + self.assertEqual(base_line + timedelta(minutes=17), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=19), vals[12].timestamp) + + self.assertEqual(3, len(ranges)) + self.assertEqual(base_line, ranges[0].from_date) + self.assertEqual(base_line + timedelta(minutes=16), ranges[0].to_date) + self.assertEqual(base_line + timedelta(minutes=17), ranges[1].from_date) + self.assertEqual(base_line + timedelta(minutes=19), ranges[1].to_date) + self.assertEqual(base_line + timedelta(minutes=40), ranges[2].from_date) + self.assertEqual(base_line + timedelta(minutes=50), ranges[2].to_date) + + # evict just the document + session.documents_by_entity.evict(user) + session.documents_by_id.remove(doc_id) + + # should go to server to get range [19, 40] + # and merge the result with existing ranges [17, 19] and [40, 50] + # into single range [17, 50] + + user = session.load( + doc_id, + User, + lambda i: i.include_time_series( + "heartRateMeasures", base_line + timedelta(minutes=18), base_line + timedelta(minutes=48) + ), + ) + + self.assertEqual(6, session.advanced.number_of_requests) + + vals = session.typed_time_series_for(HeartRateMeasure, doc_id).get( + base_line + timedelta(minutes=18), base_line + timedelta(minutes=48) + ) + + self.assertEqual(6, session.advanced.number_of_requests) + + self.assertEqual(181, len(vals)) + + self.assertEqual(base_line + timedelta(minutes=18), vals[0].timestamp) + self.assertEqual(base_line + timedelta(minutes=48), vals[180].timestamp) + + self.assertEqual(2, len(ranges)) + self.assertEqual(base_line, ranges[0].from_date) + self.assertEqual(base_line + timedelta(minutes=16), ranges[0].to_date) + self.assertEqual(base_line + timedelta(minutes=17), ranges[1].from_date) + self.assertEqual(base_line + timedelta(minutes=50), ranges[1].to_date) + + # evict just the document + session.documents_by_entity.evict(user) + 
+            session.documents_by_id.remove(doc_id)
+
+            # should go to server to get range [12, 22]
+            # and merge the result with existing ranges [0, 16] and [17, 50]
+            # into single range [0, 50]
+
+            user = session.load(
+                doc_id,
+                User,
+                lambda i: i.include_time_series(
+                    "heartRateMeasures", base_line + timedelta(minutes=12), base_line + timedelta(minutes=22)
+                ),
+            )
+
+            self.assertEqual(7, session.advanced.number_of_requests)
+
+            # should not go to server
+
+            vals = session.typed_time_series_for(HeartRateMeasure, doc_id).get(
+                base_line + timedelta(minutes=12), base_line + timedelta(minutes=22)
+            )
+
+            self.assertEqual(7, session.advanced.number_of_requests)
+
+            self.assertEqual(61, len(vals))
+            self.assertEqual(base_line + timedelta(minutes=12), vals[0].timestamp)
+            self.assertEqual(base_line + timedelta(minutes=22), vals[60].timestamp)
+
+            self.assertEqual(1, len(ranges))
+            self.assertEqual(base_line, ranges[0].from_date)
+            self.assertEqual(base_line + timedelta(minutes=50), ranges[0].to_date)
+
+            # evict just the document
+            session.documents_by_entity.evict(user)
+            session.documents_by_id.remove(doc_id)
+
+            # should go to server to get range [50, ∞]
+            # and merge the result with existing range [0, 50] into single range [0, ∞]
+            user = session.load(
+                doc_id,
+                User,
+                lambda i: i.include_time_series_by_range_type_and_time(
+                    "heartRateMeasures", TimeSeriesRangeType.LAST, TimeValue.of_minutes(10)
+                ),
+            )
+
+            self.assertEqual(8, session.advanced.number_of_requests)
+
+            # should not go to server
+
+            vals = session.typed_time_series_for(HeartRateMeasure, doc_id).get(base_line + timedelta(minutes=50), None)
+
+            self.assertEqual(8, session.advanced.number_of_requests)
+
+            self.assertEqual(60, len(vals))
+
+            self.assertEqual(base_line + timedelta(minutes=50), vals[0].timestamp)
+            self.assertEqual(base_line + timedelta(minutes=59, seconds=50), vals[59].timestamp)
+
+            self.assertEqual(1, len(ranges))
+
+            self.assertEqual(base_line, ranges[0].from_date)
+            self.assertIsNone(ranges[0].to_date)
diff --git a/ravendb/tests/operations_tests/test_time_series_operation.py b/ravendb/tests/operations_tests/test_time_series_operation.py
index f48a600c..c4fcbf84 100644
--- a/ravendb/tests/operations_tests/test_time_series_operation.py
+++ b/ravendb/tests/operations_tests/test_time_series_operation.py
@@ -26,7 +26,7 @@ def add_time_series(self):
         ts_operation = TimeSeriesOperation(self.ts_name)
 
         base = datetime.now()
-        base_24 = datetime(2023, 11, 14)
+        base_24 = datetime(2022, 11, 14)
 
         ts_operation.append(TimeSeriesOperation.AppendOperation(base, [73], "heart/rates"))
         ts_operation.append(TimeSeriesOperation.AppendOperation(base + timedelta(minutes=5), [78], "heart/rates"))
diff --git a/ravendb/tests/session_tests/test_advanced.py b/ravendb/tests/session_tests/test_advanced.py
index cfd415e5..5516574e 100644
--- a/ravendb/tests/session_tests/test_advanced.py
+++ b/ravendb/tests/session_tests/test_advanced.py
@@ -1,4 +1,4 @@
-from ravendb import constants
+from ravendb.primitives import constants
 from ravendb.documents.indexes.definitions import IndexDefinition
 from ravendb.documents.operations.indexes import PutIndexesOperation
 from ravendb.tests.test_base import TestBase
diff --git a/ravendb/tests/test_base.py b/ravendb/tests/test_base.py
index daee25e5..ad52df1b 100644
--- a/ravendb/tests/test_base.py
+++ b/ravendb/tests/test_base.py
@@ -9,7 +9,7 @@
 from subprocess import Popen
 from typing import Iterable, List, Union, Optional, Set
 from datetime import timedelta
-from ravendb import constants
+from ravendb.primitives import constants
 from ravendb.documents.operations.indexes import GetIndexErrorsOperation
 from ravendb.exceptions.exceptions import DatabaseDoesNotExistException
 from ravendb.documents.indexes.definitions import IndexState, IndexErrors
diff --git a/ravendb/tools/time_series.py b/ravendb/tools/time_series.py
new file mode 100644
index 00000000..dbbe6fbc
--- /dev/null
+++ b/ravendb/tools/time_series.py
@@ -0,0 +1,12 @@
+from datetime import datetime
+from typing import Optional
+
+
+class TSRangeHelper:
+    @staticmethod
+    def left(date: Optional[datetime]) -> datetime:
+        return date or datetime.min
+
+    @staticmethod
+    def right(date: Optional[datetime]) -> datetime:
+        return date or datetime.max
diff --git a/ravendb/tools/utils.py b/ravendb/tools/utils.py
index 85cf4eff..bcf5cba0 100644
--- a/ravendb/tools/utils.py
+++ b/ravendb/tools/utils.py
@@ -4,7 +4,7 @@
 import time
 from typing import Optional, Dict, Generic, Tuple, TypeVar, Collection, List, Union, Type, TYPE_CHECKING
 
-from ravendb import constants
+from ravendb.primitives import constants
 from ravendb.exceptions import exceptions
 from ravendb.json.metadata_as_dictionary import MetadataAsDictionary
 import OpenSSL.crypto
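
Note: the new TSRangeHelper in ravendb/tools/time_series.py normalizes open time series range bounds — a None from_date/to_date stands for -∞/+∞, as in the final [0, ∞] range asserted by the test above — so that bounds can be compared with plain datetime operators. A minimal usage sketch, not part of the patch; the variable names are illustrative only:

from datetime import datetime

from ravendb.tools.time_series import TSRangeHelper

# A cached range that is open-ended on the right, like the [0, ∞] range
# produced by the include_time_series_by_range_type_and_time call above.
from_date = datetime(2023, 8, 20, 21, 30)
to_date = None  # no upper bound

# None bounds are replaced with datetime.min / datetime.max, so overlap and
# containment checks need no special-casing of open ranges.
assert TSRangeHelper.left(from_date) == from_date
assert TSRangeHelper.right(to_date) == datetime.max
assert TSRangeHelper.left(from_date) <= TSRangeHelper.right(to_date)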