From 4f7d8dc2b2d9e319cc934ad5181d71bdfa1375c3 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Tue, 18 Mar 2025 19:25:12 -0700 Subject: [PATCH 1/2] Updated tests --- splitio/client/factory.py | 23 +- splitio/sync/split.py | 2 +- tests/api/test_auth.py | 4 +- tests/api/test_splits_api.py | 12 +- tests/client/test_client.py | 155 +++- tests/client/test_input_validator.py | 22 +- tests/client/test_localhost.py | 14 +- tests/client/test_manager.py | 8 +- tests/integration/__init__.py | 70 +- tests/integration/files/splitChanges.json | 114 ++- tests/integration/files/split_changes.json | 8 +- .../integration/files/split_changes_temp.json | 2 +- tests/integration/test_client_e2e.py | 302 +++++-- .../integration/test_pluggable_integration.py | 32 +- tests/integration/test_redis_integration.py | 16 +- tests/integration/test_streaming_e2e.py | 766 ++++++++++-------- tests/models/grammar/test_matchers.py | 8 +- tests/push/test_parser.py | 4 +- tests/storage/test_pluggable.py | 56 +- tests/sync/test_manager.py | 8 +- tests/sync/test_segments_synchronizer.py | 44 +- tests/sync/test_splits_synchronizer.py | 66 +- tests/sync/test_synchronizer.py | 80 +- tests/sync/test_telemetry.py | 4 +- tests/tasks/test_segment_sync.py | 8 +- tests/tasks/test_split_sync.py | 73 +- 26 files changed, 1163 insertions(+), 738 deletions(-) diff --git a/splitio/client/factory.py b/splitio/client/factory.py index bb402bb5..7c56819f 100644 --- a/splitio/client/factory.py +++ b/splitio/client/factory.py @@ -23,14 +23,17 @@ from splitio.storage.inmemmory import InMemorySplitStorage, InMemorySegmentStorage, \ InMemoryImpressionStorage, InMemoryEventStorage, InMemoryTelemetryStorage, LocalhostTelemetryStorage, \ InMemorySplitStorageAsync, InMemorySegmentStorageAsync, InMemoryImpressionStorageAsync, \ - InMemoryEventStorageAsync, InMemoryTelemetryStorageAsync, LocalhostTelemetryStorageAsync + InMemoryEventStorageAsync, InMemoryTelemetryStorageAsync, LocalhostTelemetryStorageAsync, \ + 
InMemoryRuleBasedSegmentStorage, InMemoryRuleBasedSegmentStorageAsync from splitio.storage.adapters import redis from splitio.storage.redis import RedisSplitStorage, RedisSegmentStorage, RedisImpressionsStorage, \ RedisEventsStorage, RedisTelemetryStorage, RedisSplitStorageAsync, RedisEventsStorageAsync,\ - RedisSegmentStorageAsync, RedisImpressionsStorageAsync, RedisTelemetryStorageAsync + RedisSegmentStorageAsync, RedisImpressionsStorageAsync, RedisTelemetryStorageAsync, \ + RedisRuleBasedSegmentsStorage, RedisRuleBasedSegmentsStorageAsync from splitio.storage.pluggable import PluggableEventsStorage, PluggableImpressionsStorage, PluggableSegmentStorage, \ PluggableSplitStorage, PluggableTelemetryStorage, PluggableTelemetryStorageAsync, PluggableEventsStorageAsync, \ - PluggableImpressionsStorageAsync, PluggableSegmentStorageAsync, PluggableSplitStorageAsync + PluggableImpressionsStorageAsync, PluggableSegmentStorageAsync, PluggableSplitStorageAsync, \ + PluggableRuleBasedSegmentsStorage, PluggableRuleBasedSegmentsStorageAsync # APIs from splitio.api.client import HttpClient, HttpClientAsync, HttpClientKerberos @@ -543,6 +546,7 @@ def _build_in_memory_factory(api_key, cfg, sdk_url=None, events_url=None, # pyl storages = { 'splits': InMemorySplitStorage(cfg['flagSetsFilter'] if cfg['flagSetsFilter'] is not None else []), 'segments': InMemorySegmentStorage(), + 'rule_based_segments': InMemoryRuleBasedSegmentStorage(), 'impressions': InMemoryImpressionStorage(cfg['impressionsQueueSize'], telemetry_runtime_producer), 'events': InMemoryEventStorage(cfg['eventsQueueSize'], telemetry_runtime_producer), } @@ -559,7 +563,7 @@ def _build_in_memory_factory(api_key, cfg, sdk_url=None, events_url=None, # pyl imp_strategy, none_strategy, telemetry_runtime_producer) synchronizers = SplitSynchronizers( - SplitSynchronizer(apis['splits'], storages['splits']), + SplitSynchronizer(apis['splits'], storages['splits'], storages['rule_based_segments']), 
SegmentSynchronizer(apis['segments'], storages['splits'], storages['segments']), ImpressionSynchronizer(apis['impressions'], storages['impressions'], cfg['impressionsBulkSize']), @@ -671,6 +675,7 @@ async def _build_in_memory_factory_async(api_key, cfg, sdk_url=None, events_url= storages = { 'splits': InMemorySplitStorageAsync(cfg['flagSetsFilter'] if cfg['flagSetsFilter'] is not None else []), 'segments': InMemorySegmentStorageAsync(), + 'rule_based_segments': InMemoryRuleBasedSegmentStorageAsync(), 'impressions': InMemoryImpressionStorageAsync(cfg['impressionsQueueSize'], telemetry_runtime_producer), 'events': InMemoryEventStorageAsync(cfg['eventsQueueSize'], telemetry_runtime_producer), } @@ -687,7 +692,7 @@ async def _build_in_memory_factory_async(api_key, cfg, sdk_url=None, events_url= imp_strategy, none_strategy, telemetry_runtime_producer) synchronizers = SplitSynchronizers( - SplitSynchronizerAsync(apis['splits'], storages['splits']), + SplitSynchronizerAsync(apis['splits'], storages['splits'], storages['rule_based_segments']), SegmentSynchronizerAsync(apis['segments'], storages['splits'], storages['segments']), ImpressionSynchronizerAsync(apis['impressions'], storages['impressions'], cfg['impressionsBulkSize']), @@ -756,6 +761,7 @@ def _build_redis_factory(api_key, cfg): storages = { 'splits': RedisSplitStorage(redis_adapter, cache_enabled, cache_ttl, []), 'segments': RedisSegmentStorage(redis_adapter), + 'rule_based_segments': RedisRuleBasedSegmentsStorage(redis_adapter), 'impressions': RedisImpressionsStorage(redis_adapter, sdk_metadata), 'events': RedisEventsStorage(redis_adapter, sdk_metadata), 'telemetry': RedisTelemetryStorage(redis_adapter, sdk_metadata) @@ -839,6 +845,7 @@ async def _build_redis_factory_async(api_key, cfg): storages = { 'splits': RedisSplitStorageAsync(redis_adapter, cache_enabled, cache_ttl), 'segments': RedisSegmentStorageAsync(redis_adapter), + 'rule_based_segments': RedisRuleBasedSegmentsStorageAsync(redis_adapter), 
'impressions': RedisImpressionsStorageAsync(redis_adapter, sdk_metadata), 'events': RedisEventsStorageAsync(redis_adapter, sdk_metadata), 'telemetry': await RedisTelemetryStorageAsync.create(redis_adapter, sdk_metadata) @@ -922,6 +929,7 @@ def _build_pluggable_factory(api_key, cfg): storages = { 'splits': PluggableSplitStorage(pluggable_adapter, storage_prefix, []), 'segments': PluggableSegmentStorage(pluggable_adapter, storage_prefix), + 'rule_based_segments': PluggableRuleBasedSegmentsStorage(pluggable_adapter, storage_prefix), 'impressions': PluggableImpressionsStorage(pluggable_adapter, sdk_metadata, storage_prefix), 'events': PluggableEventsStorage(pluggable_adapter, sdk_metadata, storage_prefix), 'telemetry': PluggableTelemetryStorage(pluggable_adapter, sdk_metadata, storage_prefix) @@ -1003,6 +1011,7 @@ async def _build_pluggable_factory_async(api_key, cfg): storages = { 'splits': PluggableSplitStorageAsync(pluggable_adapter, storage_prefix), 'segments': PluggableSegmentStorageAsync(pluggable_adapter, storage_prefix), + 'rule_based_segments': PluggableRuleBasedSegmentsStorageAsync(pluggable_adapter, storage_prefix), 'impressions': PluggableImpressionsStorageAsync(pluggable_adapter, sdk_metadata, storage_prefix), 'events': PluggableEventsStorageAsync(pluggable_adapter, sdk_metadata, storage_prefix), 'telemetry': await PluggableTelemetryStorageAsync.create(pluggable_adapter, sdk_metadata, storage_prefix) @@ -1081,6 +1090,7 @@ def _build_localhost_factory(cfg): storages = { 'splits': InMemorySplitStorage(cfg['flagSetsFilter'] if cfg['flagSetsFilter'] is not None else []), 'segments': InMemorySegmentStorage(), # not used, just to avoid possible future errors. 
+ 'rule_based_segments': InMemoryRuleBasedSegmentStorage(), 'impressions': LocalhostImpressionsStorage(), 'events': LocalhostEventsStorage(), } @@ -1088,6 +1098,7 @@ def _build_localhost_factory(cfg): synchronizers = SplitSynchronizers( LocalSplitSynchronizer(cfg['splitFile'], storages['splits'], + storages['rule_based_segments'], localhost_mode), LocalSegmentSynchronizer(cfg['segmentDirectory'], storages['splits'], storages['segments']), None, None, None, @@ -1151,6 +1162,7 @@ async def _build_localhost_factory_async(cfg): storages = { 'splits': InMemorySplitStorageAsync(), 'segments': InMemorySegmentStorageAsync(), # not used, just to avoid possible future errors. + 'rule_based_segments': InMemoryRuleBasedSegmentStorageAsync(), 'impressions': LocalhostImpressionsStorageAsync(), 'events': LocalhostEventsStorageAsync(), } @@ -1158,6 +1170,7 @@ async def _build_localhost_factory_async(cfg): synchronizers = SplitSynchronizers( LocalSplitSynchronizerAsync(cfg['splitFile'], storages['splits'], + storages['rule_based_segments'], localhost_mode), LocalSegmentSynchronizerAsync(cfg['segmentDirectory'], storages['splits'], storages['segments']), None, None, None, diff --git a/splitio/sync/split.py b/splitio/sync/split.py index 58ea900a..fa7562d0 100644 --- a/splitio/sync/split.py +++ b/splitio/sync/split.py @@ -597,7 +597,7 @@ def _sanitize_condition(self, feature_flag): { "treatment": "off", "size": 100 } ], "label": "default rule" - }) + }) return feature_flag diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index a842bd36..175977a2 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -34,7 +34,7 @@ def test_auth(self, mocker): call_made = httpclient.get.mock_calls[0] # validate positional arguments - assert call_made[1] == ('auth', 'v2/auth?s=1.1', 'some_api_key') + assert call_made[1] == ('auth', 'v2/auth?s=1.3', 'some_api_key') # validate key-value args (headers) assert call_made[2]['extra_headers'] == { @@ -89,7 +89,7 @@ async def get(verb, 
url, key, extra_headers): # validate positional arguments assert self.verb == 'auth' - assert self.url == 'v2/auth?s=1.1' + assert self.url == 'v2/auth?s=1.3' assert self.key == 'some_api_key' assert self.headers == { 'SplitSDKVersion': 'python-%s' % __version__, diff --git a/tests/api/test_splits_api.py b/tests/api/test_splits_api.py index 1826ec23..af9819ea 100644 --- a/tests/api/test_splits_api.py +++ b/tests/api/test_splits_api.py @@ -24,7 +24,7 @@ def test_fetch_split_changes(self, mocker): 'SplitSDKMachineIP': '1.2.3.4', 'SplitSDKMachineName': 'some' }, - query={'s': '1.1', 'since': 123, 'rbSince': -1, 'sets': 'set1,set2'})] + query={'s': '1.3', 'since': 123, 'rbSince': -1, 'sets': 'set1,set2'})] httpclient.reset_mock() response = split_api.fetch_splits(123, 1, FetchOptions(True, 123, None,'set3')) @@ -36,7 +36,7 @@ def test_fetch_split_changes(self, mocker): 'SplitSDKMachineName': 'some', 'Cache-Control': 'no-cache' }, - query={'s': '1.1', 'since': 123, 'rbSince': 1, 'till': 123, 'sets': 'set3'})] + query={'s': '1.3', 'since': 123, 'rbSince': 1, 'till': 123, 'sets': 'set3'})] httpclient.reset_mock() response = split_api.fetch_splits(123, 122, FetchOptions(True, 123, None, 'set3')) @@ -48,7 +48,7 @@ def test_fetch_split_changes(self, mocker): 'SplitSDKMachineName': 'some', 'Cache-Control': 'no-cache' }, - query={'s': '1.1', 'since': 123, 'rbSince': 122, 'till': 123, 'sets': 'set3'})] + query={'s': '1.3', 'since': 123, 'rbSince': 122, 'till': 123, 'sets': 'set3'})] httpclient.reset_mock() def raise_exception(*args, **kwargs): @@ -92,7 +92,7 @@ async def get(verb, url, key, query, extra_headers): 'SplitSDKMachineIP': '1.2.3.4', 'SplitSDKMachineName': 'some' } - assert self.query == {'s': '1.1', 'since': 123, 'rbSince': -1, 'sets': 'set1,set2'} + assert self.query == {'s': '1.3', 'since': 123, 'rbSince': -1, 'sets': 'set1,set2'} httpclient.reset_mock() response = await split_api.fetch_splits(123, 1, FetchOptions(True, 123, None, 'set3')) @@ -106,7 +106,7 @@ 
async def get(verb, url, key, query, extra_headers): 'SplitSDKMachineName': 'some', 'Cache-Control': 'no-cache' } - assert self.query == {'s': '1.1', 'since': 123, 'rbSince': 1, 'till': 123, 'sets': 'set3'} + assert self.query == {'s': '1.3', 'since': 123, 'rbSince': 1, 'till': 123, 'sets': 'set3'} httpclient.reset_mock() response = await split_api.fetch_splits(123, 122, FetchOptions(True, 123, None)) @@ -120,7 +120,7 @@ async def get(verb, url, key, query, extra_headers): 'SplitSDKMachineName': 'some', 'Cache-Control': 'no-cache' } - assert self.query == {'s': '1.1', 'since': 123, 'rbSince': 122, 'till': 123} + assert self.query == {'s': '1.3', 'since': 123, 'rbSince': 122, 'till': 123} httpclient.reset_mock() def raise_exception(*args, **kwargs): diff --git a/tests/client/test_client.py b/tests/client/test_client.py index 48a0fba2..526b7347 100644 --- a/tests/client/test_client.py +++ b/tests/client/test_client.py @@ -11,10 +11,11 @@ from splitio.client.factory import SplitFactory, Status as FactoryStatus, SplitFactoryAsync from splitio.models.impressions import Impression, Label from splitio.models.events import Event, EventWrapper -from splitio.storage import EventStorage, ImpressionStorage, SegmentStorage, SplitStorage +from splitio.storage import EventStorage, ImpressionStorage, SegmentStorage, SplitStorage, RuleBasedSegmentsStorage from splitio.storage.inmemmory import InMemorySplitStorage, InMemorySegmentStorage, \ InMemoryImpressionStorage, InMemoryTelemetryStorage, InMemorySplitStorageAsync, \ - InMemoryImpressionStorageAsync, InMemorySegmentStorageAsync, InMemoryTelemetryStorageAsync, InMemoryEventStorageAsync + InMemoryImpressionStorageAsync, InMemorySegmentStorageAsync, InMemoryTelemetryStorageAsync, InMemoryEventStorageAsync, \ + InMemoryRuleBasedSegmentStorage, InMemoryRuleBasedSegmentStorageAsync from splitio.models.splits import Split, Status, from_raw from splitio.engine.impressions.impressions import Manager as ImpressionManager from 
splitio.engine.impressions.manager import Counter as ImpressionsCounter @@ -35,6 +36,7 @@ def test_get_treatment(self, mocker): telemetry_producer = TelemetryStorageProducer(telemetry_storage) split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() + rb_segment_storage = InMemoryRuleBasedSegmentStorage() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) @@ -55,6 +57,7 @@ def synchronize_config(*_): factory = SplitFactory(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -70,7 +73,7 @@ def synchronize_config(*_): type(factory).ready = ready_property factory.block_until_ready(5) - split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0])], [], -1) + split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0])], [], -1) client = Client(factory, recorder, True) client._evaluator = mocker.Mock(spec=Evaluator) client._evaluator.eval_with_context.return_value = { @@ -110,6 +113,7 @@ def test_get_treatment_with_config(self, mocker): telemetry_producer = TelemetryStorageProducer(telemetry_storage) split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() + rb_segment_storage = InMemoryRuleBasedSegmentStorage() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) @@ -123,6 +127,7 @@ def test_get_treatment_with_config(self, mocker): {'splits': split_storage, 'segments': segment_storage, 'impressions': impression_storage, + 'rule_based_segments': rb_segment_storage, 'events': event_storage}, 
mocker.Mock(), recorder, @@ -140,7 +145,7 @@ def synchronize_config(*_): mocker.patch('splitio.client.client.utctime_ms', new=lambda: 1000) mocker.patch('splitio.client.client.get_latency_bucket_index', new=lambda x: 5) - split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0])], [], -1) + split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0])], [], -1) client = Client(factory, recorder, True) client._evaluator = mocker.Mock(spec=Evaluator) client._evaluator.eval_with_context.return_value = { @@ -185,11 +190,12 @@ def test_get_treatments(self, mocker): telemetry_producer = TelemetryStorageProducer(telemetry_storage) split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() + rb_segment_storage = InMemoryRuleBasedSegmentStorage() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) - split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) + split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0]), from_raw(splits_json['splitChange1_1']['ff']['d'][1])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False @@ -198,6 +204,7 @@ def test_get_treatments(self, mocker): factory = SplitFactory(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -263,11 +270,12 @@ def test_get_treatments_by_flag_set(self, mocker): telemetry_producer = TelemetryStorageProducer(telemetry_storage) split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() + rb_segment_storage = 
InMemoryRuleBasedSegmentStorage() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) - split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) + split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0]), from_raw(splits_json['splitChange1_1']['ff']['d'][1])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False @@ -276,6 +284,7 @@ def test_get_treatments_by_flag_set(self, mocker): factory = SplitFactory(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -340,11 +349,12 @@ def test_get_treatments_by_flag_sets(self, mocker): telemetry_producer = TelemetryStorageProducer(telemetry_storage) split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() + rb_segment_storage = InMemoryRuleBasedSegmentStorage() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) - split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) + split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0]), from_raw(splits_json['splitChange1_1']['ff']['d'][1])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False @@ -353,6 +363,7 @@ def 
test_get_treatments_by_flag_sets(self, mocker): factory = SplitFactory(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -417,11 +428,12 @@ def test_get_treatments_with_config(self, mocker): telemetry_producer = TelemetryStorageProducer(telemetry_storage) split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() + rb_segment_storage = InMemoryRuleBasedSegmentStorage() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) - split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) + split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0]), from_raw(splits_json['splitChange1_1']['ff']['d'][1])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False @@ -429,6 +441,7 @@ def test_get_treatments_with_config(self, mocker): factory = SplitFactory(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -498,11 +511,12 @@ def test_get_treatments_with_config_by_flag_set(self, mocker): telemetry_producer = TelemetryStorageProducer(telemetry_storage) split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() + rb_segment_storage = InMemoryRuleBasedSegmentStorage() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) impmanager = ImpressionManager(StrategyDebugMode(), 
StrategyNoneMode(), telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) - split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) + split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0]), from_raw(splits_json['splitChange1_1']['ff']['d'][1])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False @@ -510,6 +524,7 @@ def test_get_treatments_with_config_by_flag_set(self, mocker): factory = SplitFactory(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -576,11 +591,12 @@ def test_get_treatments_with_config_by_flag_sets(self, mocker): telemetry_producer = TelemetryStorageProducer(telemetry_storage) split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() + rb_segment_storage = InMemoryRuleBasedSegmentStorage() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) - split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) + split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0]), from_raw(splits_json['splitChange1_1']['ff']['d'][1])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False @@ -588,6 +604,7 @@ def test_get_treatments_with_config_by_flag_sets(self, mocker): factory = SplitFactory(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': 
event_storage}, mocker.Mock(), @@ -654,6 +671,7 @@ def test_impression_toggle_optimized(self, mocker): telemetry_producer = TelemetryStorageProducer(telemetry_storage) split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() + rb_segment_storage = InMemoryRuleBasedSegmentStorage() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) @@ -673,6 +691,7 @@ def synchronize_config(*_): factory = SplitFactory(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -687,9 +706,9 @@ def synchronize_config(*_): factory.block_until_ready(5) split_storage.update([ - from_raw(splits_json['splitChange1_1']['splits'][0]), - from_raw(splits_json['splitChange1_1']['splits'][1]), - from_raw(splits_json['splitChange1_1']['splits'][2]) + from_raw(splits_json['splitChange1_1']['ff']['d'][0]), + from_raw(splits_json['splitChange1_1']['ff']['d'][1]), + from_raw(splits_json['splitChange1_1']['ff']['d'][2]) ], [], -1) client = Client(factory, recorder, True) assert client.get_treatment('some_key', 'SPLIT_1') == 'off' @@ -716,6 +735,7 @@ def test_impression_toggle_debug(self, mocker): telemetry_producer = TelemetryStorageProducer(telemetry_storage) split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() + rb_segment_storage = InMemoryRuleBasedSegmentStorage() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) @@ -735,6 +755,7 @@ def synchronize_config(*_): factory = SplitFactory(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 
'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -749,9 +770,9 @@ def synchronize_config(*_): factory.block_until_ready(5) split_storage.update([ - from_raw(splits_json['splitChange1_1']['splits'][0]), - from_raw(splits_json['splitChange1_1']['splits'][1]), - from_raw(splits_json['splitChange1_1']['splits'][2]) + from_raw(splits_json['splitChange1_1']['ff']['d'][0]), + from_raw(splits_json['splitChange1_1']['ff']['d'][1]), + from_raw(splits_json['splitChange1_1']['ff']['d'][2]) ], [], -1) client = Client(factory, recorder, True) assert client.get_treatment('some_key', 'SPLIT_1') == 'off' @@ -778,6 +799,7 @@ def test_impression_toggle_none(self, mocker): telemetry_producer = TelemetryStorageProducer(telemetry_storage) split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() + rb_segment_storage = InMemoryRuleBasedSegmentStorage() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) @@ -797,6 +819,7 @@ def synchronize_config(*_): factory = SplitFactory(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -811,9 +834,9 @@ def synchronize_config(*_): factory.block_until_ready(5) split_storage.update([ - from_raw(splits_json['splitChange1_1']['splits'][0]), - from_raw(splits_json['splitChange1_1']['splits'][1]), - from_raw(splits_json['splitChange1_1']['splits'][2]) + from_raw(splits_json['splitChange1_1']['ff']['d'][0]), + from_raw(splits_json['splitChange1_1']['ff']['d'][1]), + from_raw(splits_json['splitChange1_1']['ff']['d'][2]) ], [], -1) client = Client(factory, recorder, True) assert client.get_treatment('some_key', 'SPLIT_1') == 'off' @@ -829,6 +852,7 @@ def test_destroy(self, mocker): """Test that destroy/destroyed 
calls are forwarded to the factory.""" split_storage = mocker.Mock(spec=SplitStorage) segment_storage = mocker.Mock(spec=SegmentStorage) + rb_segment_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) impression_storage = mocker.Mock(spec=ImpressionStorage) event_storage = mocker.Mock(spec=EventStorage) @@ -839,6 +863,7 @@ def test_destroy(self, mocker): factory = SplitFactory(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -863,6 +888,7 @@ def test_track(self, mocker): """Test that destroy/destroyed calls are forwarded to the factory.""" split_storage = mocker.Mock(spec=SplitStorage) segment_storage = mocker.Mock(spec=SegmentStorage) + rb_segment_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) impression_storage = mocker.Mock(spec=ImpressionStorage) event_storage = mocker.Mock(spec=EventStorage) event_storage.put.return_value = True @@ -874,6 +900,7 @@ def test_track(self, mocker): factory = SplitFactory(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -912,7 +939,8 @@ def test_evaluations_before_running_post_fork(self, mocker): impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() - split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0])], [], -1) + rb_segment_storage = InMemoryRuleBasedSegmentStorage() + split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False @@ -921,6 +949,7 @@ def test_evaluations_before_running_post_fork(self, mocker): factory = SplitFactory(mocker.Mock(), {'splits': split_storage, 'segments': 
segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': mocker.Mock()}, mocker.Mock(), @@ -991,11 +1020,13 @@ def test_telemetry_not_ready(self, mocker): impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() - split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0])], [], -1) + rb_segment_storage = InMemoryRuleBasedSegmentStorage() + split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0])], [], -1) recorder = StandardRecorder(impmanager, mocker.Mock(), mocker.Mock(), telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) factory = SplitFactory('localhost', {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': mocker.Mock()}, mocker.Mock(), @@ -1021,8 +1052,9 @@ def synchronize_config(*_): def test_telemetry_record_treatment_exception(self, mocker): split_storage = InMemorySplitStorage() - split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0])], [], -1) + split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0])], [], -1) segment_storage = mocker.Mock(spec=SegmentStorage) + rb_segment_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) impression_storage = mocker.Mock(spec=ImpressionStorage) event_storage = mocker.Mock(spec=EventStorage) destroyed_property = mocker.PropertyMock() @@ -1038,6 +1070,7 @@ def test_telemetry_record_treatment_exception(self, mocker): factory = SplitFactory('localhost', {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -1125,7 +1158,8 @@ def test_telemetry_method_latency(self, mocker): impmanager = 
ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() - split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0])], [], -1) + rb_segment_storage = InMemoryRuleBasedSegmentStorage() + split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0])], [], -1) recorder = StandardRecorder(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False @@ -1136,6 +1170,7 @@ def test_telemetry_method_latency(self, mocker): factory = SplitFactory(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -1189,6 +1224,7 @@ def stop(*_): def test_telemetry_track_exception(self, mocker): split_storage = mocker.Mock(spec=SplitStorage) segment_storage = mocker.Mock(spec=SegmentStorage) + rb_segment_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) impression_storage = mocker.Mock(spec=ImpressionStorage) event_storage = mocker.Mock(spec=EventStorage) destroyed_property = mocker.PropertyMock() @@ -1204,6 +1240,7 @@ def test_telemetry_track_exception(self, mocker): factory = SplitFactory(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -1238,12 +1275,13 @@ async def test_get_treatment_async(self, mocker): telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() + rb_segment_storage = InMemoryRuleBasedSegmentStorageAsync() telemetry_runtime_producer = 
telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) - await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0])], [], -1) + await split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False @@ -1257,6 +1295,7 @@ async def synchronize_config(*_): factory = SplitFactoryAsync(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -1307,12 +1346,13 @@ async def test_get_treatment_with_config_async(self, mocker): telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() + rb_segment_storage = InMemoryRuleBasedSegmentStorageAsync() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) - await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0])], [], -1) + await 
split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False @@ -1320,6 +1360,7 @@ async def test_get_treatment_with_config_async(self, mocker): factory = SplitFactoryAsync(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -1382,12 +1423,13 @@ async def test_get_treatments_async(self, mocker): telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() + rb_segment_storage = InMemoryRuleBasedSegmentStorageAsync() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) - await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) + await split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0]), from_raw(splits_json['splitChange1_1']['ff']['d'][1])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False @@ -1395,6 +1437,7 @@ async def test_get_treatments_async(self, mocker): factory = SplitFactoryAsync(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -1460,12 +1503,13 @@ async def 
test_get_treatments_by_flag_set_async(self, mocker): telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() + rb_segment_storage = InMemoryRuleBasedSegmentStorageAsync() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) - await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) + await split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0]), from_raw(splits_json['splitChange1_1']['ff']['d'][1])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False @@ -1473,6 +1517,7 @@ async def test_get_treatments_by_flag_set_async(self, mocker): factory = SplitFactoryAsync(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -1538,12 +1583,13 @@ async def test_get_treatments_by_flag_sets_async(self, mocker): telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() + rb_segment_storage = InMemoryRuleBasedSegmentStorageAsync() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) impmanager = 
ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) - await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) + await split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0]), from_raw(splits_json['splitChange1_1']['ff']['d'][1])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False @@ -1551,6 +1597,7 @@ async def test_get_treatments_by_flag_sets_async(self, mocker): factory = SplitFactoryAsync(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -1616,18 +1663,20 @@ async def test_get_treatments_with_config(self, mocker): telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() + rb_segment_storage = InMemoryRuleBasedSegmentStorageAsync() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) - await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) + await split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0]), 
from_raw(splits_json['splitChange1_1']['ff']['d'][1])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False factory = SplitFactoryAsync(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -1698,18 +1747,20 @@ async def test_get_treatments_with_config_by_flag_set(self, mocker): telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() + rb_segment_storage = InMemoryRuleBasedSegmentStorageAsync() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) - await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) + await split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0]), from_raw(splits_json['splitChange1_1']['ff']['d'][1])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False factory = SplitFactoryAsync(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -1780,18 +1831,20 @@ async def test_get_treatments_with_config_by_flag_sets(self, mocker): telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) split_storage = InMemorySplitStorageAsync() 
segment_storage = InMemorySegmentStorageAsync() + rb_segment_storage = InMemoryRuleBasedSegmentStorageAsync() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) - await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) + await split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0]), from_raw(splits_json['splitChange1_1']['ff']['d'][1])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False factory = SplitFactoryAsync(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -1862,6 +1915,7 @@ async def test_impression_toggle_optimized(self, mocker): telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() + rb_segment_storage = InMemoryRuleBasedSegmentStorageAsync() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) @@ -1877,6 +1931,7 @@ async def test_impression_toggle_optimized(self, mocker): factory = SplitFactoryAsync(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': 
event_storage}, mocker.Mock(), @@ -1890,9 +1945,9 @@ async def test_impression_toggle_optimized(self, mocker): await factory.block_until_ready(5) await split_storage.update([ - from_raw(splits_json['splitChange1_1']['splits'][0]), - from_raw(splits_json['splitChange1_1']['splits'][1]), - from_raw(splits_json['splitChange1_1']['splits'][2]) + from_raw(splits_json['splitChange1_1']['ff']['d'][0]), + from_raw(splits_json['splitChange1_1']['ff']['d'][1]), + from_raw(splits_json['splitChange1_1']['ff']['d'][2]) ], [], -1) client = ClientAsync(factory, recorder, True) treatment = await client.get_treatment('some_key', 'SPLIT_1') @@ -1923,6 +1978,7 @@ async def test_impression_toggle_debug(self, mocker): telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() + rb_segment_storage = InMemoryRuleBasedSegmentStorageAsync() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) @@ -1938,6 +1994,7 @@ async def test_impression_toggle_debug(self, mocker): factory = SplitFactoryAsync(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -1951,9 +2008,9 @@ async def test_impression_toggle_debug(self, mocker): await factory.block_until_ready(5) await split_storage.update([ - from_raw(splits_json['splitChange1_1']['splits'][0]), - from_raw(splits_json['splitChange1_1']['splits'][1]), - from_raw(splits_json['splitChange1_1']['splits'][2]) + from_raw(splits_json['splitChange1_1']['ff']['d'][0]), + from_raw(splits_json['splitChange1_1']['ff']['d'][1]), + from_raw(splits_json['splitChange1_1']['ff']['d'][2]) ], [], -1) client = ClientAsync(factory, recorder, True) assert await 
client.get_treatment('some_key', 'SPLIT_1') == 'off' @@ -1981,6 +2038,7 @@ async def test_impression_toggle_none(self, mocker): telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() + rb_segment_storage = InMemoryRuleBasedSegmentStorageAsync() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) @@ -1996,6 +2054,7 @@ async def test_impression_toggle_none(self, mocker): factory = SplitFactoryAsync(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -2009,9 +2068,9 @@ async def test_impression_toggle_none(self, mocker): await factory.block_until_ready(5) await split_storage.update([ - from_raw(splits_json['splitChange1_1']['splits'][0]), - from_raw(splits_json['splitChange1_1']['splits'][1]), - from_raw(splits_json['splitChange1_1']['splits'][2]) + from_raw(splits_json['splitChange1_1']['ff']['d'][0]), + from_raw(splits_json['splitChange1_1']['ff']['d'][1]), + from_raw(splits_json['splitChange1_1']['ff']['d'][2]) ], [], -1) client = ClientAsync(factory, recorder, True) assert await client.get_treatment('some_key', 'SPLIT_1') == 'off' @@ -2027,6 +2086,7 @@ async def test_track_async(self, mocker): """Test that destroy/destroyed calls are forwarded to the factory.""" split_storage = InMemorySplitStorageAsync() segment_storage = mocker.Mock(spec=SegmentStorage) + rb_segment_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) impression_storage = mocker.Mock(spec=ImpressionStorage) event_storage = mocker.Mock(spec=EventStorage) self.events = [] @@ -2042,6 +2102,7 @@ async def put(event): factory = SplitFactoryAsync(mocker.Mock(), {'splits': split_storage, 'segments': 
segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -2076,15 +2137,17 @@ async def test_telemetry_not_ready_async(self, mocker): telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() + rb_segment_storage = InMemoryRuleBasedSegmentStorageAsync() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = InMemoryEventStorageAsync(10, telemetry_runtime_producer) impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) - await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0])], [], -1) + await split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0])], [], -1) factory = SplitFactoryAsync('localhost', {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': mocker.Mock()}, mocker.Mock(), @@ -2117,12 +2180,13 @@ async def test_telemetry_record_treatment_exception_async(self, mocker): telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() + rb_segment_storage = InMemoryRuleBasedSegmentStorageAsync() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = InMemoryEventStorageAsync(10, telemetry_runtime_producer) impmanager = ImpressionManager(StrategyDebugMode(), 
StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) - await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0])], [], -1) + await split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False @@ -2132,6 +2196,7 @@ async def test_telemetry_record_treatment_exception_async(self, mocker): factory = SplitFactoryAsync(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -2189,12 +2254,13 @@ async def test_telemetry_method_latency_async(self, mocker): telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() + rb_segment_storage = InMemoryRuleBasedSegmentStorageAsync() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = InMemoryEventStorageAsync(10, telemetry_runtime_producer) impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) - await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0])], [], -1) + await split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False @@ -2204,6 +2270,7 @@ async def 
test_telemetry_method_latency_async(self, mocker): factory = SplitFactoryAsync(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -2260,6 +2327,7 @@ async def synchronize_config(*_): async def test_telemetry_track_exception_async(self, mocker): split_storage = InMemorySplitStorageAsync() segment_storage = mocker.Mock(spec=SegmentStorage) + rb_segment_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) impression_storage = mocker.Mock(spec=ImpressionStorage) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False @@ -2275,6 +2343,7 @@ async def test_telemetry_track_exception_async(self, mocker): factory = SplitFactoryAsync(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), diff --git a/tests/client/test_input_validator.py b/tests/client/test_input_validator.py index 5afecdd4..81b1c06b 100644 --- a/tests/client/test_input_validator.py +++ b/tests/client/test_input_validator.py @@ -6,7 +6,7 @@ from splitio.client.client import CONTROL, Client, _LOGGER as _logger, ClientAsync from splitio.client.manager import SplitManager, SplitManagerAsync from splitio.client.key import Key -from splitio.storage import SplitStorage, EventStorage, ImpressionStorage, SegmentStorage +from splitio.storage import SplitStorage, EventStorage, ImpressionStorage, SegmentStorage, RuleBasedSegmentsStorage from splitio.storage.inmemmory import InMemoryTelemetryStorage, InMemoryTelemetryStorageAsync, \ InMemorySplitStorage, InMemorySplitStorageAsync from splitio.models.splits import Split @@ -40,6 +40,7 @@ def test_get_treatment(self, mocker): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 
'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -277,6 +278,7 @@ def _configs(treatment): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -551,6 +553,7 @@ def test_track(self, mocker): { 'splits': split_storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': events_storage_mock, }, @@ -825,6 +828,7 @@ def test_get_treatments(self, mocker): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -969,6 +973,7 @@ def test_get_treatments_with_config(self, mocker): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -1113,6 +1118,7 @@ def test_get_treatments_by_flag_set(self, mocker): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -1228,6 +1234,7 @@ def test_get_treatments_by_flag_sets(self, mocker): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -1353,6 +1360,7 @@ def _configs(treatment): { 'splits': storage_mock, 'segments': 
mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -1472,6 +1480,7 @@ def _configs(treatment): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -1624,6 +1633,7 @@ async def get_change_number(*_): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -1880,6 +1890,7 @@ async def get_change_number(*_): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -2123,6 +2134,7 @@ async def put(*_): { 'splits': split_storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': events_storage_mock, }, @@ -2407,6 +2419,7 @@ async def fetch_many(*_): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -2565,6 +2578,7 @@ async def fetch_many(*_): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -2726,6 +2740,7 @@ async def get_feature_flags_by_sets(*_): { 'splits': 
storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -2865,6 +2880,7 @@ async def get_feature_flags_by_sets(*_): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -3014,6 +3030,7 @@ async def get_feature_flags_by_sets(*_): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -3156,6 +3173,7 @@ async def get_feature_flags_by_sets(*_): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -3312,6 +3330,7 @@ def test_split_(self, mocker): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -3388,6 +3407,7 @@ async def get(*_): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, diff --git a/tests/client/test_localhost.py b/tests/client/test_localhost.py index 280e79f9..598d6100 100644 --- a/tests/client/test_localhost.py +++ b/tests/client/test_localhost.py @@ -6,7 +6,7 @@ from splitio.sync.split import LocalSplitSynchronizer from splitio.models.splits import Split from 
splitio.models.grammar.matchers import AllKeysMatcher -from splitio.storage import SplitStorage +from splitio.storage import SplitStorage, RuleBasedSegmentsStorage class LocalHostStoragesTests(object): @@ -112,10 +112,10 @@ def test_update_splits(self, mocker): parse_yaml.return_value = {} storage_mock = mocker.Mock(spec=SplitStorage) storage_mock.get_split_names.return_value = [] - + rbs = mocker.Mock(spec=RuleBasedSegmentsStorage) parse_legacy.reset_mock() parse_yaml.reset_mock() - sync = LocalSplitSynchronizer('something', storage_mock) + sync = LocalSplitSynchronizer('something', storage_mock, rbs) sync._read_feature_flags_from_legacy_file = parse_legacy sync._read_feature_flags_from_yaml_file = parse_yaml sync.synchronize_splits() @@ -124,7 +124,7 @@ def test_update_splits(self, mocker): parse_legacy.reset_mock() parse_yaml.reset_mock() - sync = LocalSplitSynchronizer('something.yaml', storage_mock) + sync = LocalSplitSynchronizer('something.yaml', storage_mock, rbs) sync._read_feature_flags_from_legacy_file = parse_legacy sync._read_feature_flags_from_yaml_file = parse_yaml sync.synchronize_splits() @@ -133,7 +133,7 @@ def test_update_splits(self, mocker): parse_legacy.reset_mock() parse_yaml.reset_mock() - sync = LocalSplitSynchronizer('something.yml', storage_mock) + sync = LocalSplitSynchronizer('something.yml', storage_mock, rbs) sync._read_feature_flags_from_legacy_file = parse_legacy sync._read_feature_flags_from_yaml_file = parse_yaml sync.synchronize_splits() @@ -142,7 +142,7 @@ def test_update_splits(self, mocker): parse_legacy.reset_mock() parse_yaml.reset_mock() - sync = LocalSplitSynchronizer('something.YAML', storage_mock) + sync = LocalSplitSynchronizer('something.YAML', storage_mock, rbs) sync._read_feature_flags_from_legacy_file = parse_legacy sync._read_feature_flags_from_yaml_file = parse_yaml sync.synchronize_splits() @@ -151,7 +151,7 @@ def test_update_splits(self, mocker): parse_legacy.reset_mock() parse_yaml.reset_mock() - sync = 
LocalSplitSynchronizer('yaml', storage_mock) + sync = LocalSplitSynchronizer('yaml', storage_mock, rbs) sync._read_feature_flags_from_legacy_file = parse_legacy sync._read_feature_flags_from_yaml_file = parse_yaml sync.synchronize_splits() diff --git a/tests/client/test_manager.py b/tests/client/test_manager.py index ae856f9a..19e1bbb0 100644 --- a/tests/client/test_manager.py +++ b/tests/client/test_manager.py @@ -26,8 +26,8 @@ def test_manager_calls(self, mocker): factory.ready = True manager = SplitManager(factory) - split1 = splits.from_raw(splits_json["splitChange1_1"]["splits"][0]) - split2 = splits.from_raw(splits_json["splitChange1_3"]["splits"][0]) + split1 = splits.from_raw(splits_json["splitChange1_1"]['ff']['d'][0]) + split2 = splits.from_raw(splits_json["splitChange1_3"]['ff']['d'][0]) storage.update([split1, split2], [], -1) manager._storage = storage @@ -98,8 +98,8 @@ async def test_manager_calls(self, mocker): factory.ready = True manager = SplitManagerAsync(factory) - split1 = splits.from_raw(splits_json["splitChange1_1"]["splits"][0]) - split2 = splits.from_raw(splits_json["splitChange1_3"]["splits"][0]) + split1 = splits.from_raw(splits_json["splitChange1_1"]['ff']['d'][0]) + split2 = splits.from_raw(splits_json["splitChange1_3"]['ff']['d'][0]) await storage.update([split1, split2], [], -1) manager._storage = storage diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py index ab6e3293..124f5b37 100644 --- a/tests/integration/__init__.py +++ b/tests/integration/__init__.py @@ -1,53 +1,57 @@ -split11 = {"splits": [ +import copy + +rbsegments_json = [{ + "segment1": {"changeNumber": 12, "name": "some_segment", "status": "ACTIVE","trafficTypeName": "user","excluded":{"keys":[],"segments":[]},"conditions": []} +}] + +split11 = {"ff": {"t": 1675443569027, "s": -1, "d": [ {"trafficTypeName": "user", "name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779, "seed": -113875324, "status": "ACTIVE","killed": False, 
"defaultTreatment": "off", "changeNumber": 1675443569027,"algo": 2, "configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}], "sets": ["set_1"], "impressionsDisabled": False}, {"trafficTypeName": "user", "name": "SPLIT_1", "trafficAllocation": 100, "trafficAllocationSeed": -1780071202,"seed": -1442762199, "status": "ACTIVE","killed": False, "defaultTreatment": "off", "changeNumber": 1675443537882,"algo": 2, "configurations": {},"conditions": [{"conditionType": "ROLLOUT", "matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 0 },{ "treatment": "off", "size": 100 }],"label": "default rule"}], "sets": ["set_1", "set_2"]}, {"trafficTypeName": "user", "name": "SPLIT_3","trafficAllocation": 100,"trafficAllocationSeed": 1057590779, "seed": -113875324, "status": "ACTIVE","killed": False, "defaultTreatment": "off", "changeNumber": 1675443569027,"algo": 2, "configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": 
None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}], "sets": ["set_1"], "impressionsDisabled": True} - ],"since": -1,"till": 1675443569027} -split12 = {"splits": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": True,"defaultTreatment": "off","changeNumber": 1675443767288,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"since": 1675443569027,"till": 167544376728} -split13 = {"splits": [ + ]}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}} +split12 = {"ff": {"s": 1675443569027,"t": 167544376728, "d": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": True,"defaultTreatment": "off","changeNumber": 1675443767288,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ 
"treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}]}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}} +split13 = {"ff": {"s": 1675443767288,"t": 1675443984594, "d": [ {"trafficTypeName": "user","name": "SPLIT_1","trafficAllocation": 100,"trafficAllocationSeed": -1780071202,"seed": -1442762199,"status": "ARCHIVED","killed": False,"defaultTreatment": "off","changeNumber": 1675443984594,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 0 },{ "treatment": "off", "size": 100 }],"label": "default rule"}]}, {"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": False,"defaultTreatment": "off","changeNumber": 1675443954220,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]} - ],"since": 1675443767288,"till": 1675443984594} + ]}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}} -split41 = split11 -split42 = split12 -split43 = split13 -split41["since"] = None -split41["till"] = None -split42["since"] = None -split42["till"] 
= None -split43["since"] = None -split43["till"] = None +split41 = {"ff": {"t": None, "s": None, "d": split11['ff']['d']}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}} +split42 = {"ff": {"t": None, "s": None, "d": split12['ff']['d']}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}} +split43 = {"ff": {"t": None, "s": None, "d": split13['ff']['d']}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}} -split61 = split11 -split62 = split12 -split63 = split13 - -split61["since"] = -1 -split61["till"] = -1 -split62["since"] = -1 -split62["till"] = -1 -split63["since"] = -1 -split63["till"] = -1 +split61 = {"ff": {"t": -1, "s": -1, "d": split11['ff']['d']}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}} +split62 = {"ff": {"t": -1, "s": -1, "d": split12['ff']['d']}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}} +split63 = {"ff": {"t": -1, "s": -1, "d": split13['ff']['d']}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}} splits_json = { "splitChange1_1": split11, "splitChange1_2": split12, "splitChange1_3": split13, - "splitChange2_1": {"splits": [{"name": "SPLIT_1","status": "ACTIVE","killed": False,"defaultTreatment": "off","configurations": {},"conditions": []}]}, - "splitChange3_1": {"splits": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": False,"defaultTreatment": "off","changeNumber": 1675443569027,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"since": 
-1,"till": 1675443569027}, - "splitChange3_2": {"splits": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": True,"defaultTreatment": "off","changeNumber": 1675443767288,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"since": 1675443569027,"till": 1675443569027}, + "splitChange2_1": {"ff": {"t": -1, "s": -1, "d": [{"name": "SPLIT_1","status": "ACTIVE","killed": False,"defaultTreatment": "off","configurations": {},"conditions": []}]}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}}, + "splitChange3_1": {"ff": {"t": -1, "s": -1, "d": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": False,"defaultTreatment": "off","changeNumber": 1675443569027,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"since": -1,"till": 1675443569027}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}}, 
+ "splitChange3_2": {"ff": {"t": -1, "s": -1, "d": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": True,"defaultTreatment": "off","changeNumber": 1675443767288,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"since": 1675443569027,"till": 1675443569027}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}}, "splitChange4_1": split41, "splitChange4_2": split42, "splitChange4_3": split43, - "splitChange5_1": {"splits": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": False,"defaultTreatment": "off","changeNumber": 1675443569027,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"since": -1,"till": 1675443569027}, - "splitChange5_2": {"splits": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": 
"ACTIVE","killed": True,"defaultTreatment": "off","changeNumber": 1675443767288,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"since": 1675443569026,"till": 1675443569026}, + "splitChange5_1": {"ff": {"t": -1, "s": -1, "d": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": False,"defaultTreatment": "off","changeNumber": 1675443569027,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"since": -1,"till": 1675443569027}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}}, + "splitChange5_2": {"ff": {"t": -1, "s": -1, "d": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": True,"defaultTreatment": "off","changeNumber": 1675443767288,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { 
"trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"since": 1675443569026,"till": 1675443569026}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}}, "splitChange6_1": split61, "splitChange6_2": split62, "splitChange6_3": split63, -} - -rbsegments_json = { - "segment1": {"changeNumber": 12, "name": "some_segment", "status": "ACTIVE","trafficTypeName": "user","excluded":{"keys":[],"segments":[]},"conditions": []} + "splitChange7_1": {"ff": { + "t": -1, + "s": -1, + "d": [{"changeNumber": 10,"trafficTypeName": "user","name": "rbs_feature_flag","trafficAllocation": 100,"trafficAllocationSeed": 1828377380,"seed": -286617921,"status": "ACTIVE","killed": False,"defaultTreatment": "off","algo": 2, + "conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": {"trafficType": "user"},"matcherType": "IN_RULE_BASED_SEGMENT","negate": False,"userDefinedSegmentMatcherData": {"segmentName": "sample_rule_based_segment"}}]},"partitions": [{"treatment": "on","size": 100},{"treatment": "off","size": 0}],"label": "in rule based segment sample_rule_based_segment"},{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": {"trafficType": "user"},"matcherType": "ALL_KEYS","negate": False}]},"partitions": [{"treatment": "on","size": 0},{"treatment": "off","size": 100}],"label": "default rule"}], + "configurations": {}, + "sets": [], + "impressionsDisabled": False + }] + }, "rbs": { + "t": 1675259356568, + "s": -1, + "d": [{"changeNumber": 5,"name": "sample_rule_based_segment","status": "ACTIVE","trafficTypeName": 
"user","excluded":{"keys":["mauro@split.io","gaston@split.io"],"segments":[]}, + "conditions": [{"matcherGroup": {"combiner": "AND","matchers": [{"keySelector": {"trafficType": "user","attribute": "email"},"matcherType": "ENDS_WITH","negate": False,"whitelistMatcherData": {"whitelist": ["@split.io"]}}]}}]} + ]}} } \ No newline at end of file diff --git a/tests/integration/files/splitChanges.json b/tests/integration/files/splitChanges.json index 9125481d..d9ab1c24 100644 --- a/tests/integration/files/splitChanges.json +++ b/tests/integration/files/splitChanges.json @@ -1,5 +1,6 @@ { - "splits": [ + "ff": { + "d": [ { "orgId": null, "environment": null, @@ -321,8 +322,111 @@ } ], "sets": [] - } + }, + { + "changeNumber": 10, + "trafficTypeName": "user", + "name": "rbs_feature_flag", + "trafficAllocation": 100, + "trafficAllocationSeed": 1828377380, + "seed": -286617921, + "status": "ACTIVE", + "killed": false, + "defaultTreatment": "off", + "algo": 2, + "conditions": [ + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user" + }, + "matcherType": "IN_RULE_BASED_SEGMENT", + "negate": false, + "userDefinedSegmentMatcherData": { + "segmentName": "sample_rule_based_segment" + } + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + }, + { + "treatment": "off", + "size": 0 + } + ], + "label": "in rule based segment sample_rule_based_segment" + }, + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user" + }, + "matcherType": "ALL_KEYS", + "negate": false + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 0 + }, + { + "treatment": "off", + "size": 100 + } + ], + "label": "default rule" + } + ], + "configurations": {}, + "sets": [], + "impressionsDisabled": false + } ], - "since": -1, - "till": 1457726098069 -} + "s": -1, + "t": 1457726098069 +}, "rbs": {"t": -1, "s": -1, "d": [{ + 
"changeNumber": 123, + "name": "sample_rule_based_segment", + "status": "ACTIVE", + "trafficTypeName": "user", + "excluded":{ + "keys":["mauro@split.io","gaston@split.io"], + "segments":[] + }, + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": "email" + }, + "matcherType": "ENDS_WITH", + "negate": false, + "whitelistMatcherData": { + "whitelist": [ + "@split.io" + ] + } + } + ] + } + } + ] +}]}} diff --git a/tests/integration/files/split_changes.json b/tests/integration/files/split_changes.json index 6084b108..f0708043 100644 --- a/tests/integration/files/split_changes.json +++ b/tests/integration/files/split_changes.json @@ -1,5 +1,6 @@ { - "splits": [ + "ff": { + "d": [ { "orgId": null, "environment": null, @@ -323,6 +324,7 @@ "sets": [] } ], - "since": -1, - "till": 1457726098069 + "s": -1, + "t": 1457726098069 +}, "rbs": {"t": -1, "s": -1, "d": []} } diff --git a/tests/integration/files/split_changes_temp.json b/tests/integration/files/split_changes_temp.json index 162c0b17..64575226 100644 --- a/tests/integration/files/split_changes_temp.json +++ b/tests/integration/files/split_changes_temp.json @@ -1 +1 @@ -{"splits": [{"trafficTypeName": "user", "name": "SPLIT_1", "trafficAllocation": 100, "trafficAllocationSeed": -1780071202, "seed": -1442762199, "status": "ARCHIVED", "killed": false, "defaultTreatment": "off", "changeNumber": 1675443984594, "algo": 2, "configurations": {}, "conditions": [{"conditionType": "ROLLOUT", "matcherGroup": {"combiner": "AND", "matchers": [{"keySelector": {"trafficType": "user", "attribute": null}, "matcherType": "ALL_KEYS", "negate": false, "userDefinedSegmentMatcherData": null, "whitelistMatcherData": null, "unaryNumericMatcherData": null, "betweenMatcherData": null, "booleanMatcherData": null, "dependencyMatcherData": null, "stringMatcherData": null}]}, "partitions": [{"treatment": "on", "size": 0}, {"treatment": "off", "size": 100}], 
"label": "default rule"}]}, {"trafficTypeName": "user", "name": "SPLIT_2", "trafficAllocation": 100, "trafficAllocationSeed": 1057590779, "seed": -113875324, "status": "ACTIVE", "killed": false, "defaultTreatment": "off", "changeNumber": 1675443954220, "algo": 2, "configurations": {}, "conditions": [{"conditionType": "ROLLOUT", "matcherGroup": {"combiner": "AND", "matchers": [{"keySelector": {"trafficType": "user", "attribute": null}, "matcherType": "ALL_KEYS", "negate": false, "userDefinedSegmentMatcherData": null, "whitelistMatcherData": null, "unaryNumericMatcherData": null, "betweenMatcherData": null, "booleanMatcherData": null, "dependencyMatcherData": null, "stringMatcherData": null}]}, "partitions": [{"treatment": "on", "size": 100}, {"treatment": "off", "size": 0}], "label": "default rule"}]}], "since": -1, "till": -1} \ No newline at end of file +{"ff": {"t": -1, "s": -1, "d": [{"changeNumber": 10, "trafficTypeName": "user", "name": "rbs_feature_flag", "trafficAllocation": 100, "trafficAllocationSeed": 1828377380, "seed": -286617921, "status": "ACTIVE", "killed": false, "defaultTreatment": "off", "algo": 2, "conditions": [{"conditionType": "ROLLOUT", "matcherGroup": {"combiner": "AND", "matchers": [{"keySelector": {"trafficType": "user"}, "matcherType": "IN_RULE_BASED_SEGMENT", "negate": false, "userDefinedSegmentMatcherData": {"segmentName": "sample_rule_based_segment"}}]}, "partitions": [{"treatment": "on", "size": 100}, {"treatment": "off", "size": 0}], "label": "in rule based segment sample_rule_based_segment"}, {"conditionType": "ROLLOUT", "matcherGroup": {"combiner": "AND", "matchers": [{"keySelector": {"trafficType": "user"}, "matcherType": "ALL_KEYS", "negate": false}]}, "partitions": [{"treatment": "on", "size": 0}, {"treatment": "off", "size": 100}], "label": "default rule"}], "configurations": {}, "sets": [], "impressionsDisabled": false}]}, "rbs": {"t": 1675259356568, "s": -1, "d": [{"changeNumber": 5, "name": "sample_rule_based_segment", 
"status": "ACTIVE", "trafficTypeName": "user", "excluded": {"keys": ["mauro@split.io", "gaston@split.io"], "segments": []}, "conditions": [{"matcherGroup": {"combiner": "AND", "matchers": [{"keySelector": {"trafficType": "user", "attribute": "email"}, "matcherType": "ENDS_WITH", "negate": false, "whitelistMatcherData": {"whitelist": ["@split.io"]}}]}}]}]}} \ No newline at end of file diff --git a/tests/integration/test_client_e2e.py b/tests/integration/test_client_e2e.py index 94a11624..c8a6a666 100644 --- a/tests/integration/test_client_e2e.py +++ b/tests/integration/test_client_e2e.py @@ -15,15 +15,17 @@ from splitio.storage.inmemmory import InMemoryEventStorage, InMemoryImpressionStorage, \ InMemorySegmentStorage, InMemorySplitStorage, InMemoryTelemetryStorage, InMemorySplitStorageAsync,\ InMemoryEventStorageAsync, InMemoryImpressionStorageAsync, InMemorySegmentStorageAsync, \ - InMemoryTelemetryStorageAsync + InMemoryTelemetryStorageAsync, InMemoryRuleBasedSegmentStorage, InMemoryRuleBasedSegmentStorageAsync from splitio.storage.redis import RedisEventsStorage, RedisImpressionsStorage, \ RedisSplitStorage, RedisSegmentStorage, RedisTelemetryStorage, RedisEventsStorageAsync,\ - RedisImpressionsStorageAsync, RedisSegmentStorageAsync, RedisSplitStorageAsync, RedisTelemetryStorageAsync + RedisImpressionsStorageAsync, RedisSegmentStorageAsync, RedisSplitStorageAsync, RedisTelemetryStorageAsync, \ + RedisRuleBasedSegmentsStorage, RedisRuleBasedSegmentsStorageAsync from splitio.storage.pluggable import PluggableEventsStorage, PluggableImpressionsStorage, PluggableSegmentStorage, \ PluggableTelemetryStorage, PluggableSplitStorage, PluggableEventsStorageAsync, PluggableImpressionsStorageAsync, \ - PluggableSegmentStorageAsync, PluggableSplitStorageAsync, PluggableTelemetryStorageAsync + PluggableSegmentStorageAsync, PluggableSplitStorageAsync, PluggableTelemetryStorageAsync, \ + PluggableRuleBasedSegmentsStorage, PluggableRuleBasedSegmentsStorageAsync from 
splitio.storage.adapters.redis import build, RedisAdapter, RedisAdapterAsync, build_async -from splitio.models import splits, segments +from splitio.models import splits, segments, rule_based_segments from splitio.engine.impressions.impressions import Manager as ImpressionsManager, ImpressionsMode from splitio.engine.impressions import set_classes, set_classes_async from splitio.engine.impressions.strategies import StrategyDebugMode, StrategyOptimizedMode, StrategyNoneMode @@ -154,6 +156,16 @@ def _get_treatment(factory): if not isinstance(factory._recorder._impressions_manager._strategy, StrategyNoneMode): _validate_last_impressions(client, ('regex_test', 'abc4', 'on')) + # test rule based segment matcher + assert client.get_treatment('bilal@split.io', 'rbs_feature_flag', {'email': 'bilal@split.io'}) == 'on' + if not isinstance(factory._recorder._impressions_manager._strategy, StrategyNoneMode): + _validate_last_impressions(client, ('rbs_feature_flag', 'bilal@split.io', 'on')) + + # test rule based segment matcher + assert client.get_treatment('mauro@split.io', 'rbs_feature_flag', {'email': 'mauro@split.io'}) == 'off' + if not isinstance(factory._recorder._impressions_manager._strategy, StrategyNoneMode): + _validate_last_impressions(client, ('rbs_feature_flag', 'mauro@split.io', 'off')) + def _get_treatment_with_config(factory): """Test client.get_treatment_with_config().""" try: @@ -438,8 +450,8 @@ def _manager_methods(factory): assert result.change_number == 123 assert result.configs['on'] == '{"size":15,"test":20}' - assert len(manager.split_names()) == 7 - assert len(manager.splits()) == 7 + assert len(manager.split_names()) == 8 + assert len(manager.splits()) == 8 class InMemoryDebugIntegrationTests(object): """Inmemory storage-based integration tests.""" @@ -448,13 +460,17 @@ def setup_method(self): """Prepare storages with test data.""" split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() + rb_segment_storage = 
InMemoryRuleBasedSegmentStorage() split_fn = os.path.join(os.path.dirname(__file__), 'files', 'splitChanges.json') with open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: split_storage.update([splits.from_raw(split)], [], 0) + for rbs in data['rbs']['d']: + rb_segment_storage.update([rule_based_segments.from_raw(rbs)], [], 0) + segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentEmployeesChanges.json') with open(segment_fn, 'r') as flo: data = json.loads(flo.read()) @@ -473,6 +489,7 @@ def setup_method(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': InMemoryImpressionStorage(5000, telemetry_runtime_producer), 'events': InMemoryEventStorage(5000, telemetry_runtime_producer), } @@ -604,13 +621,16 @@ def setup_method(self): """Prepare storages with test data.""" split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() - + rb_segment_storage = InMemoryRuleBasedSegmentStorage() split_fn = os.path.join(os.path.dirname(__file__), 'files', 'splitChanges.json') with open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: split_storage.update([splits.from_raw(split)], [], 0) + for rbs in data['rbs']['d']: + rb_segment_storage.update([rule_based_segments.from_raw(rbs)], [], 0) + segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentEmployeesChanges.json') with open(segment_fn, 'r') as flo: data = json.loads(flo.read()) @@ -629,6 +649,7 @@ def setup_method(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': InMemoryImpressionStorage(5000, telemetry_runtime_producer), 'events': InMemoryEventStorage(5000, telemetry_runtime_producer), } @@ -733,16 +754,20 @@ def setup_method(self): redis_client = build(DEFAULT_CONFIG.copy()) 
split_storage = RedisSplitStorage(redis_client) segment_storage = RedisSegmentStorage(redis_client) + rb_segment_storage = RedisRuleBasedSegmentsStorage(redis_client) split_fn = os.path.join(os.path.dirname(__file__), 'files', 'splitChanges.json') with open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: redis_client.set(split_storage._get_key(split['name']), json.dumps(split)) if split.get('sets') is not None: for flag_set in split.get('sets'): redis_client.sadd(split_storage._get_flag_set_key(flag_set), split['name']) - redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, data['till']) + redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, data['ff']['t']) + + for rbs in data['rbs']['d']: + redis_client.set(rb_segment_storage._get_key(rbs['name']), json.dumps(rbs)) segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentEmployeesChanges.json') with open(segment_fn, 'r') as flo: @@ -763,6 +788,7 @@ def setup_method(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': RedisImpressionsStorage(redis_client, metadata), 'events': RedisEventsStorage(redis_client, metadata), } @@ -899,7 +925,10 @@ def teardown_method(self): "SPLITIO.split.set.set1", "SPLITIO.split.set.set2", "SPLITIO.split.set.set3", - "SPLITIO.split.set.set4" + "SPLITIO.split.set.set4", + "SPLITIO.split.rbs_feature_flag", + "SPLITIO.rbsegments.till", + "SPLITIO.rbsegments.sample_rule_based_segment" ] redis_client = RedisAdapter(StrictRedis()) @@ -915,13 +944,17 @@ def setup_method(self): redis_client = build(DEFAULT_CONFIG.copy()) split_storage = RedisSplitStorage(redis_client, True) segment_storage = RedisSegmentStorage(redis_client) + rb_segment_storage = RedisRuleBasedSegmentsStorage(redis_client) split_fn = os.path.join(os.path.dirname(__file__), 'files', 'splitChanges.json') with open(split_fn, 'r') as flo: data = 
json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: redis_client.set(split_storage._get_key(split['name']), json.dumps(split)) - redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, data['till']) + redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, data['ff']['t']) + + for rbs in data['rbs']['d']: + redis_client.set(rb_segment_storage._get_key(rbs['name']), json.dumps(rbs)) segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentEmployeesChanges.json') with open(segment_fn, 'r') as flo: @@ -943,6 +976,7 @@ def setup_method(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': RedisImpressionsStorage(redis_client, metadata), 'events': RedisEventsStorage(redis_client, metadata), } @@ -986,7 +1020,7 @@ def test_localhost_json_e2e(self): assert sorted(self.factory.manager().split_names()) == ["SPLIT_1", "SPLIT_2", "SPLIT_3"] assert client.get_treatment("key", "SPLIT_1", None) == 'off' - assert client.get_treatment("key", "SPLIT_2", None) == 'off' + assert client.get_treatment("key", "SPLIT_2", None) == 'on' #?? self._update_temp_file(splits_json['splitChange1_3']) self._synchronize_now() @@ -1044,7 +1078,7 @@ def test_localhost_json_e2e(self): self._synchronize_now() assert sorted(self.factory.manager().split_names()) == ["SPLIT_2", "SPLIT_3"] - assert client.get_treatment("key", "SPLIT_2", None) == 'on' + assert client.get_treatment("key", "SPLIT_2", None) == 'off' #?? 
# Tests 6 self.factory._storages['splits'].update([], ['SPLIT_2'], -1) @@ -1069,6 +1103,12 @@ def test_localhost_json_e2e(self): assert client.get_treatment("key", "SPLIT_1", None) == 'control' assert client.get_treatment("key", "SPLIT_2", None) == 'on' + # rule based segment test + self._update_temp_file(splits_json['splitChange7_1']) + self._synchronize_now() + assert client.get_treatment('bilal@split.io', 'rbs_feature_flag', {'email': 'bilal@split.io'}) == 'on' + assert client.get_treatment('mauro@split.io', 'rbs_feature_flag', {'email': 'mauro@split.io'}) == 'off' + def _update_temp_file(self, json_body): f = open(os.path.join(os.path.dirname(__file__), 'files','split_changes_temp.json'), 'w') f.write(json.dumps(json_body)) @@ -1106,7 +1146,6 @@ def test_incorrect_file_e2e(self): factory.destroy(event) event.wait() - def test_localhost_e2e(self): """Instantiate a client with a YAML file and issue get_treatment() calls.""" filename = os.path.join(os.path.dirname(__file__), 'files', 'file2.yaml') @@ -1136,7 +1175,6 @@ def test_localhost_e2e(self): factory.destroy(event) event.wait() - class PluggableIntegrationTests(object): """Pluggable storage-based integration tests.""" @@ -1146,6 +1184,7 @@ def setup_method(self): self.pluggable_storage_adapter = StorageMockAdapter() split_storage = PluggableSplitStorage(self.pluggable_storage_adapter) segment_storage = PluggableSegmentStorage(self.pluggable_storage_adapter) + rb_segment_storage = PluggableRuleBasedSegmentsStorage(self.pluggable_storage_adapter) telemetry_pluggable_storage = PluggableTelemetryStorage(self.pluggable_storage_adapter, metadata) telemetry_producer = TelemetryStorageProducer(telemetry_pluggable_storage) @@ -1155,6 +1194,7 @@ def setup_method(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': PluggableImpressionsStorage(self.pluggable_storage_adapter, metadata), 'events': 
PluggableEventsStorage(self.pluggable_storage_adapter, metadata), 'telemetry': telemetry_pluggable_storage @@ -1178,12 +1218,15 @@ def setup_method(self): split_fn = os.path.join(os.path.dirname(__file__), 'files', 'splitChanges.json') with open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: self.pluggable_storage_adapter.set(split_storage._prefix.format(feature_flag_name=split['name']), split) if split.get('sets') is not None: for flag_set in split.get('sets'): self.pluggable_storage_adapter.push_items(split_storage._flag_set_prefix.format(flag_set=flag_set), split['name']) - self.pluggable_storage_adapter.set(split_storage._feature_flag_till_prefix, data['till']) + self.pluggable_storage_adapter.set(split_storage._feature_flag_till_prefix, data['ff']['t']) + + for rbs in data['rbs']['d']: + self.pluggable_storage_adapter.set(rb_segment_storage._prefix.format(segment_name=rbs['name']), rbs) segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentEmployeesChanges.json') with open(segment_fn, 'r') as flo: @@ -1319,7 +1362,10 @@ def teardown_method(self): "SPLITIO.split.set.set1", "SPLITIO.split.set.set2", "SPLITIO.split.set.set3", - "SPLITIO.split.set.set4" + "SPLITIO.split.set.set4", + "SPLITIO.split.rbs_feature_flag", + "SPLITIO.rbsegments.till", + "SPLITIO.rbsegments.sample_rule_based_segment" ] for key in keys_to_delete: self.pluggable_storage_adapter.delete(key) @@ -1333,6 +1379,7 @@ def setup_method(self): self.pluggable_storage_adapter = StorageMockAdapter() split_storage = PluggableSplitStorage(self.pluggable_storage_adapter) segment_storage = PluggableSegmentStorage(self.pluggable_storage_adapter) + rb_segment_storage = PluggableRuleBasedSegmentsStorage(self.pluggable_storage_adapter) telemetry_pluggable_storage = PluggableTelemetryStorage(self.pluggable_storage_adapter, metadata) telemetry_producer = TelemetryStorageProducer(telemetry_pluggable_storage) @@ -1342,6 +1389,7 @@ 
def setup_method(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': PluggableImpressionsStorage(self.pluggable_storage_adapter, metadata), 'events': PluggableEventsStorage(self.pluggable_storage_adapter, metadata), 'telemetry': telemetry_pluggable_storage @@ -1365,12 +1413,15 @@ def setup_method(self): split_fn = os.path.join(os.path.dirname(__file__), 'files', 'splitChanges.json') with open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: if split.get('sets') is not None: for flag_set in split.get('sets'): self.pluggable_storage_adapter.push_items(split_storage._flag_set_prefix.format(flag_set=flag_set), split['name']) self.pluggable_storage_adapter.set(split_storage._prefix.format(feature_flag_name=split['name']), split) - self.pluggable_storage_adapter.set(split_storage._feature_flag_till_prefix, data['till']) + self.pluggable_storage_adapter.set(split_storage._feature_flag_till_prefix, data['ff']['t']) + + for rbs in data['rbs']['d']: + self.pluggable_storage_adapter.set(rb_segment_storage._prefix.format(segment_name=rbs['name']), rbs) segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentEmployeesChanges.json') with open(segment_fn, 'r') as flo: @@ -1483,7 +1534,10 @@ def teardown_method(self): "SPLITIO.split.set.set1", "SPLITIO.split.set.set2", "SPLITIO.split.set.set3", - "SPLITIO.split.set.set4" + "SPLITIO.split.set.set4", + "SPLITIO.split.rbs_feature_flag", + "SPLITIO.rbsegments.till", + "SPLITIO.rbsegments.sample_rule_based_segment" ] for key in keys_to_delete: self.pluggable_storage_adapter.delete(key) @@ -1497,7 +1551,7 @@ def setup_method(self): self.pluggable_storage_adapter = StorageMockAdapter() split_storage = PluggableSplitStorage(self.pluggable_storage_adapter) segment_storage = PluggableSegmentStorage(self.pluggable_storage_adapter) - + rb_segment_storage = 
PluggableRuleBasedSegmentsStorage(self.pluggable_storage_adapter) telemetry_pluggable_storage = PluggableTelemetryStorage(self.pluggable_storage_adapter, metadata) telemetry_producer = TelemetryStorageProducer(telemetry_pluggable_storage) telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() @@ -1506,6 +1560,7 @@ def setup_method(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': PluggableImpressionsStorage(self.pluggable_storage_adapter, metadata), 'events': PluggableEventsStorage(self.pluggable_storage_adapter, metadata), 'telemetry': telemetry_pluggable_storage @@ -1552,12 +1607,15 @@ def setup_method(self): split_fn = os.path.join(os.path.dirname(__file__), 'files', 'splitChanges.json') with open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: if split.get('sets') is not None: for flag_set in split.get('sets'): self.pluggable_storage_adapter.push_items(split_storage._flag_set_prefix.format(flag_set=flag_set), split['name']) self.pluggable_storage_adapter.set(split_storage._prefix.format(feature_flag_name=split['name']), split) - self.pluggable_storage_adapter.set(split_storage._feature_flag_till_prefix, data['till']) + self.pluggable_storage_adapter.set(split_storage._feature_flag_till_prefix, data['ff']['t']) + + for rbs in data['rbs']['d']: + self.pluggable_storage_adapter.set(rb_segment_storage._prefix.format(segment_name=rbs['name']), rbs) segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentEmployeesChanges.json') with open(segment_fn, 'r') as flo: @@ -1668,9 +1726,9 @@ def test_optimized(self): split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() - split_storage.update([splits.from_raw(splits_json['splitChange1_1']['splits'][0]), - splits.from_raw(splits_json['splitChange1_1']['splits'][1]), - 
splits.from_raw(splits_json['splitChange1_1']['splits'][2]) + split_storage.update([splits.from_raw(splits_json['splitChange1_1']['ff']['d'][0]), + splits.from_raw(splits_json['splitChange1_1']['ff']['d'][1]), + splits.from_raw(splits_json['splitChange1_1']['ff']['d'][2]) ], [], -1) telemetry_storage = InMemoryTelemetryStorage() @@ -1681,6 +1739,7 @@ def test_optimized(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': InMemoryRuleBasedSegmentStorage(), 'impressions': InMemoryImpressionStorage(5000, telemetry_runtime_producer), 'events': InMemoryEventStorage(5000, telemetry_runtime_producer), } @@ -1722,9 +1781,9 @@ def test_debug(self): split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() - split_storage.update([splits.from_raw(splits_json['splitChange1_1']['splits'][0]), - splits.from_raw(splits_json['splitChange1_1']['splits'][1]), - splits.from_raw(splits_json['splitChange1_1']['splits'][2]) + split_storage.update([splits.from_raw(splits_json['splitChange1_1']['ff']['d'][0]), + splits.from_raw(splits_json['splitChange1_1']['ff']['d'][1]), + splits.from_raw(splits_json['splitChange1_1']['ff']['d'][2]) ], [], -1) telemetry_storage = InMemoryTelemetryStorage() @@ -1735,6 +1794,7 @@ def test_debug(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': InMemoryRuleBasedSegmentStorage(), 'impressions': InMemoryImpressionStorage(5000, telemetry_runtime_producer), 'events': InMemoryEventStorage(5000, telemetry_runtime_producer), } @@ -1776,9 +1836,9 @@ def test_none(self): split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() - split_storage.update([splits.from_raw(splits_json['splitChange1_1']['splits'][0]), - splits.from_raw(splits_json['splitChange1_1']['splits'][1]), - splits.from_raw(splits_json['splitChange1_1']['splits'][2]) + split_storage.update([splits.from_raw(splits_json['splitChange1_1']['ff']['d'][0]), + 
splits.from_raw(splits_json['splitChange1_1']['ff']['d'][1]), + splits.from_raw(splits_json['splitChange1_1']['ff']['d'][2]) ], [], -1) telemetry_storage = InMemoryTelemetryStorage() @@ -1789,6 +1849,7 @@ def test_none(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': InMemoryRuleBasedSegmentStorage(), 'impressions': InMemoryImpressionStorage(5000, telemetry_runtime_producer), 'events': InMemoryEventStorage(5000, telemetry_runtime_producer), } @@ -1838,9 +1899,9 @@ def test_optimized(self): split_storage = RedisSplitStorage(redis_client, True) segment_storage = RedisSegmentStorage(redis_client) - redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][0]['name']), json.dumps(splits_json['splitChange1_1']['splits'][0])) - redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][1]['name']), json.dumps(splits_json['splitChange1_1']['splits'][1])) - redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][2]['name']), json.dumps(splits_json['splitChange1_1']['splits'][2])) + redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][0]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][0])) + redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][1]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][1])) + redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][2]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][2])) redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, -1) telemetry_redis_storage = RedisTelemetryStorage(redis_client, metadata) @@ -1851,6 +1912,7 @@ def test_optimized(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': RedisRuleBasedSegmentsStorage(redis_client), 'impressions': RedisImpressionsStorage(redis_client, metadata), 'events': RedisEventsStorage(redis_client, metadata), } 
@@ -1901,9 +1963,9 @@ def test_debug(self): split_storage = RedisSplitStorage(redis_client, True) segment_storage = RedisSegmentStorage(redis_client) - redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][0]['name']), json.dumps(splits_json['splitChange1_1']['splits'][0])) - redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][1]['name']), json.dumps(splits_json['splitChange1_1']['splits'][1])) - redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][2]['name']), json.dumps(splits_json['splitChange1_1']['splits'][2])) + redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][0]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][0])) + redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][1]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][1])) + redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][2]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][2])) redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, -1) telemetry_redis_storage = RedisTelemetryStorage(redis_client, metadata) @@ -1914,6 +1976,7 @@ def test_debug(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': RedisRuleBasedSegmentsStorage(redis_client), 'impressions': RedisImpressionsStorage(redis_client, metadata), 'events': RedisEventsStorage(redis_client, metadata), } @@ -1964,9 +2027,9 @@ def test_none(self): split_storage = RedisSplitStorage(redis_client, True) segment_storage = RedisSegmentStorage(redis_client) - redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][0]['name']), json.dumps(splits_json['splitChange1_1']['splits'][0])) - redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][1]['name']), json.dumps(splits_json['splitChange1_1']['splits'][1])) - 
redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][2]['name']), json.dumps(splits_json['splitChange1_1']['splits'][2])) + redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][0]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][0])) + redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][1]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][1])) + redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][2]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][2])) redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, -1) telemetry_redis_storage = RedisTelemetryStorage(redis_client, metadata) @@ -1977,6 +2040,7 @@ def test_none(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': RedisRuleBasedSegmentsStorage(redis_client), 'impressions': RedisImpressionsStorage(redis_client, metadata), 'events': RedisEventsStorage(redis_client, metadata), } @@ -2046,13 +2110,17 @@ async def _setup_method(self): """Prepare storages with test data.""" split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() + rb_segment_storage = InMemoryRuleBasedSegmentStorageAsync() split_fn = os.path.join(os.path.dirname(__file__), 'files', 'splitChanges.json') with open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: await split_storage.update([splits.from_raw(split)], [], -1) + for rbs in data['rbs']['d']: + await rb_segment_storage.update([rule_based_segments.from_raw(rbs)], [], 0) + segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentEmployeesChanges.json') with open(segment_fn, 'r') as flo: data = json.loads(flo.read()) @@ -2071,6 +2139,7 @@ async def _setup_method(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 
'impressions': InMemoryImpressionStorageAsync(5000, telemetry_runtime_producer), 'events': InMemoryEventStorageAsync(5000, telemetry_runtime_producer), } @@ -2212,13 +2281,16 @@ async def _setup_method(self): """Prepare storages with test data.""" split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() - + rb_segment_storage = InMemoryRuleBasedSegmentStorageAsync() split_fn = os.path.join(os.path.dirname(__file__), 'files', 'splitChanges.json') with open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: await split_storage.update([splits.from_raw(split)], [], -1) + for rbs in data['rbs']['d']: + await rb_segment_storage.update([rule_based_segments.from_raw(rbs)], [], 0) + segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentEmployeesChanges.json') with open(segment_fn, 'r') as flo: data = json.loads(flo.read()) @@ -2237,6 +2309,7 @@ async def _setup_method(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': InMemoryImpressionStorageAsync(5000, telemetry_runtime_producer), 'events': InMemoryEventStorageAsync(5000, telemetry_runtime_producer), } @@ -2364,17 +2437,20 @@ async def _setup_method(self): split_storage = RedisSplitStorageAsync(redis_client) segment_storage = RedisSegmentStorageAsync(redis_client) + rb_segment_storage = RedisRuleBasedSegmentsStorageAsync(redis_client) split_fn = os.path.join(os.path.dirname(__file__), 'files', 'splitChanges.json') with open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: await redis_client.set(split_storage._get_key(split['name']), json.dumps(split)) if split.get('sets') is not None: for flag_set in split.get('sets'): await redis_client.sadd(split_storage._get_flag_set_key(flag_set), split['name']) + await redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, 
data['ff']['t']) - await redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, data['till']) + for rbs in data['rbs']['d']: + await redis_client.set(rb_segment_storage._get_key(rbs['name']), json.dumps(rbs)) segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentEmployeesChanges.json') with open(segment_fn, 'r') as flo: @@ -2396,6 +2472,7 @@ async def _setup_method(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': RedisImpressionsStorageAsync(redis_client, metadata), 'events': RedisEventsStorageAsync(redis_client, metadata), } @@ -2560,7 +2637,10 @@ async def _clear_cache(self, redis_client): "SPLITIO.segment.employees.till", "SPLITIO.split.whitelist_feature", "SPLITIO.telemetry.latencies", - "SPLITIO.split.dependency_test" + "SPLITIO.split.dependency_test", + "SPLITIO.split.rbs_feature_flag", + "SPLITIO.rbsegments.till", + "SPLITIO.rbsegments.sample_rule_based_segment" ] for key in keys_to_delete: await redis_client.delete(key) @@ -2579,16 +2659,20 @@ async def _setup_method(self): split_storage = RedisSplitStorageAsync(redis_client, True) segment_storage = RedisSegmentStorageAsync(redis_client) + rb_segment_storage = RedisRuleBasedSegmentsStorageAsync(redis_client) split_fn = os.path.join(os.path.dirname(__file__), 'files', 'splitChanges.json') with open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: await redis_client.set(split_storage._get_key(split['name']), json.dumps(split)) if split.get('sets') is not None: for flag_set in split.get('sets'): await redis_client.sadd(split_storage._get_flag_set_key(flag_set), split['name']) - await redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, data['till']) + await redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, data['ff']['t']) + + for rbs in data['rbs']['d']: + await redis_client.set(rb_segment_storage._get_key(rbs['name']), json.dumps(rbs)) 
segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentEmployeesChanges.json') with open(segment_fn, 'r') as flo: @@ -2610,6 +2694,7 @@ async def _setup_method(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': RedisImpressionsStorageAsync(redis_client, metadata), 'events': RedisEventsStorageAsync(redis_client, metadata), } @@ -2659,7 +2744,7 @@ async def test_localhost_json_e2e(self): assert sorted(await self.factory.manager().split_names()) == ["SPLIT_1", "SPLIT_2", "SPLIT_3"] assert await client.get_treatment("key", "SPLIT_1", None) == 'off' - assert await client.get_treatment("key", "SPLIT_2", None) == 'off' + assert await client.get_treatment("key", "SPLIT_2", None) == 'on' #?? self._update_temp_file(splits_json['splitChange1_3']) await self._synchronize_now() @@ -2717,7 +2802,7 @@ async def test_localhost_json_e2e(self): await self._synchronize_now() assert sorted(await self.factory.manager().split_names()) == ["SPLIT_2", "SPLIT_3"] - assert await client.get_treatment("key", "SPLIT_2", None) == 'on' + assert await client.get_treatment("key", "SPLIT_2", None) == 'off' #?? 
# Tests 6 await self.factory._storages['splits'].update([], ['SPLIT_2'], -1) @@ -2742,6 +2827,12 @@ async def test_localhost_json_e2e(self): assert await client.get_treatment("key", "SPLIT_1", None) == 'control' assert await client.get_treatment("key", "SPLIT_2", None) == 'on' + # rule based segment test + self._update_temp_file(splits_json['splitChange7_1']) + await self._synchronize_now() + assert await client.get_treatment('bilal@split.io', 'rbs_feature_flag', {'email': 'bilal@split.io'}) == 'on' + assert await client.get_treatment('mauro@split.io', 'rbs_feature_flag', {'email': 'mauro@split.io'}) == 'off' + def _update_temp_file(self, json_body): f = open(os.path.join(os.path.dirname(__file__), 'files','split_changes_temp.json'), 'w') f.write(json.dumps(json_body)) @@ -2821,6 +2912,7 @@ async def _setup_method(self): self.pluggable_storage_adapter = StorageMockAdapterAsync() split_storage = PluggableSplitStorageAsync(self.pluggable_storage_adapter, 'myprefix') segment_storage = PluggableSegmentStorageAsync(self.pluggable_storage_adapter, 'myprefix') + rb_segment_storage = PluggableRuleBasedSegmentsStorageAsync(self.pluggable_storage_adapter, 'myprefix') telemetry_pluggable_storage = await PluggableTelemetryStorageAsync.create(self.pluggable_storage_adapter, metadata, 'myprefix') telemetry_producer = TelemetryStorageProducerAsync(telemetry_pluggable_storage) @@ -2830,6 +2922,7 @@ async def _setup_method(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': PluggableImpressionsStorageAsync(self.pluggable_storage_adapter, metadata), 'events': PluggableEventsStorageAsync(self.pluggable_storage_adapter, metadata), 'telemetry': telemetry_pluggable_storage @@ -2858,11 +2951,14 @@ async def _setup_method(self): split_fn = os.path.join(os.path.dirname(__file__), 'files', 'splitChanges.json') with open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for 
split in data['ff']['d']: await self.pluggable_storage_adapter.set(split_storage._prefix.format(feature_flag_name=split['name']), split) for flag_set in split.get('sets'): await self.pluggable_storage_adapter.push_items(split_storage._flag_set_prefix.format(flag_set=flag_set), split['name']) - await self.pluggable_storage_adapter.set(split_storage._feature_flag_till_prefix, data['till']) + await self.pluggable_storage_adapter.set(split_storage._feature_flag_till_prefix, data['ff']['t']) + + for rbs in data['rbs']['d']: + await self.pluggable_storage_adapter.set(rb_segment_storage._prefix.format(segment_name=rbs['name']), rbs) segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentEmployeesChanges.json') with open(segment_fn, 'r') as flo: @@ -3023,7 +3119,10 @@ async def _teardown_method(self): "SPLITIO.split.regex_test", "SPLITIO.segment.human_beigns.till", "SPLITIO.split.boolean_test", - "SPLITIO.split.dependency_test" + "SPLITIO.split.dependency_test", + "SPLITIO.split.rbs_feature_flag", + "SPLITIO.rbsegments.till", + "SPLITIO.rbsegments.sample_rule_based_segment" ] for key in keys_to_delete: @@ -3041,6 +3140,7 @@ async def _setup_method(self): self.pluggable_storage_adapter = StorageMockAdapterAsync() split_storage = PluggableSplitStorageAsync(self.pluggable_storage_adapter) segment_storage = PluggableSegmentStorageAsync(self.pluggable_storage_adapter) + rb_segment_storage = PluggableRuleBasedSegmentsStorageAsync(self.pluggable_storage_adapter, 'myprefix') telemetry_pluggable_storage = await PluggableTelemetryStorageAsync.create(self.pluggable_storage_adapter, metadata) telemetry_producer = TelemetryStorageProducerAsync(telemetry_pluggable_storage) @@ -3050,6 +3150,7 @@ async def _setup_method(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': PluggableImpressionsStorageAsync(self.pluggable_storage_adapter, metadata), 'events':
PluggableEventsStorageAsync(self.pluggable_storage_adapter, metadata), 'telemetry': telemetry_pluggable_storage @@ -3080,11 +3181,14 @@ async def _setup_method(self): split_fn = os.path.join(os.path.dirname(__file__), 'files', 'splitChanges.json') with open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: await self.pluggable_storage_adapter.set(split_storage._prefix.format(feature_flag_name=split['name']), split) for flag_set in split.get('sets'): await self.pluggable_storage_adapter.push_items(split_storage._flag_set_prefix.format(flag_set=flag_set), split['name']) - await self.pluggable_storage_adapter.set(split_storage._feature_flag_till_prefix, data['till']) + await self.pluggable_storage_adapter.set(split_storage._feature_flag_till_prefix, data['ff']['t']) + + for rbs in data['rbs']['d']: + await self.pluggable_storage_adapter.set(rb_segment_storage._prefix.format(segment_name=rbs['name']), rbs) segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentEmployeesChanges.json') with open(segment_fn, 'r') as flo: @@ -3230,7 +3334,10 @@ async def _teardown_method(self): "SPLITIO.split.regex_test", "SPLITIO.segment.human_beigns.till", "SPLITIO.split.boolean_test", - "SPLITIO.split.dependency_test" + "SPLITIO.split.dependency_test", + "SPLITIO.split.rbs_feature_flag", + "SPLITIO.rbsegments.till", + "SPLITIO.rbsegments.sample_rule_based_segment" ] for key in keys_to_delete: @@ -3248,6 +3355,7 @@ async def _setup_method(self): self.pluggable_storage_adapter = StorageMockAdapterAsync() split_storage = PluggableSplitStorageAsync(self.pluggable_storage_adapter) segment_storage = PluggableSegmentStorageAsync(self.pluggable_storage_adapter) + rb_segment_storage = PluggableRuleBasedSegmentsStorageAsync(self.pluggable_storage_adapter, 'myprefix') telemetry_pluggable_storage = await PluggableTelemetryStorageAsync.create(self.pluggable_storage_adapter, metadata) telemetry_producer = 
TelemetryStorageProducerAsync(telemetry_pluggable_storage) @@ -3257,6 +3365,7 @@ async def _setup_method(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': PluggableImpressionsStorageAsync(self.pluggable_storage_adapter, metadata), 'events': PluggableEventsStorageAsync(self.pluggable_storage_adapter, metadata), 'telemetry': telemetry_pluggable_storage @@ -3302,11 +3411,14 @@ async def _setup_method(self): split_fn = os.path.join(os.path.dirname(__file__), 'files', 'splitChanges.json') with open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: await self.pluggable_storage_adapter.set(split_storage._prefix.format(feature_flag_name=split['name']), split) for flag_set in split.get('sets'): await self.pluggable_storage_adapter.push_items(split_storage._flag_set_prefix.format(flag_set=flag_set), split['name']) - await self.pluggable_storage_adapter.set(split_storage._feature_flag_till_prefix, data['till']) + await self.pluggable_storage_adapter.set(split_storage._feature_flag_till_prefix, data['ff']['t']) + + for rbs in data['rbs']['d']: + await self.pluggable_storage_adapter.set(rb_segment_storage._prefix.format(segment_name=rbs['name']), rbs) segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentEmployeesChanges.json') with open(segment_fn, 'r') as flo: @@ -3461,7 +3573,10 @@ async def _teardown_method(self): "SPLITIO.split.regex_test", "SPLITIO.segment.human_beigns.till", "SPLITIO.split.boolean_test", - "SPLITIO.split.dependency_test" + "SPLITIO.split.dependency_test", + "SPLITIO.split.rbs_feature_flag", + "SPLITIO.rbsegments.till", + "SPLITIO.rbsegments.sample_rule_based_segment" ] for key in keys_to_delete: @@ -3475,9 +3590,9 @@ async def test_optimized(self): split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() - await 
split_storage.update([splits.from_raw(splits_json['splitChange1_1']['splits'][0]), - splits.from_raw(splits_json['splitChange1_1']['splits'][1]), - splits.from_raw(splits_json['splitChange1_1']['splits'][2]) + await split_storage.update([splits.from_raw(splits_json['splitChange1_1']['ff']['d'][0]), + splits.from_raw(splits_json['splitChange1_1']['ff']['d'][1]), + splits.from_raw(splits_json['splitChange1_1']['ff']['d'][2]) ], [], -1) telemetry_storage = await InMemoryTelemetryStorageAsync.create() @@ -3488,6 +3603,7 @@ async def test_optimized(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': InMemoryRuleBasedSegmentStorageAsync(), 'impressions': InMemoryImpressionStorageAsync(5000, telemetry_runtime_producer), 'events': InMemoryEventStorageAsync(5000, telemetry_runtime_producer), } @@ -3534,9 +3650,9 @@ async def test_debug(self): split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() - await split_storage.update([splits.from_raw(splits_json['splitChange1_1']['splits'][0]), - splits.from_raw(splits_json['splitChange1_1']['splits'][1]), - splits.from_raw(splits_json['splitChange1_1']['splits'][2]) + await split_storage.update([splits.from_raw(splits_json['splitChange1_1']['ff']['d'][0]), + splits.from_raw(splits_json['splitChange1_1']['ff']['d'][1]), + splits.from_raw(splits_json['splitChange1_1']['ff']['d'][2]) ], [], -1) telemetry_storage = await InMemoryTelemetryStorageAsync.create() @@ -3547,6 +3663,7 @@ async def test_debug(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': InMemoryRuleBasedSegmentStorageAsync(), 'impressions': InMemoryImpressionStorageAsync(5000, telemetry_runtime_producer), 'events': InMemoryEventStorageAsync(5000, telemetry_runtime_producer), } @@ -3593,9 +3710,9 @@ async def test_none(self): split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() - await 
split_storage.update([splits.from_raw(splits_json['splitChange1_1']['splits'][0]), - splits.from_raw(splits_json['splitChange1_1']['splits'][1]), - splits.from_raw(splits_json['splitChange1_1']['splits'][2]) + await split_storage.update([splits.from_raw(splits_json['splitChange1_1']['ff']['d'][0]), + splits.from_raw(splits_json['splitChange1_1']['ff']['d'][1]), + splits.from_raw(splits_json['splitChange1_1']['ff']['d'][2]) ], [], -1) telemetry_storage = await InMemoryTelemetryStorageAsync.create() @@ -3606,6 +3723,7 @@ async def test_none(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': InMemoryRuleBasedSegmentStorageAsync(), 'impressions': InMemoryImpressionStorageAsync(5000, telemetry_runtime_producer), 'events': InMemoryEventStorageAsync(5000, telemetry_runtime_producer), } @@ -3659,10 +3777,11 @@ async def test_optimized(self): redis_client = await build_async(DEFAULT_CONFIG.copy()) split_storage = RedisSplitStorageAsync(redis_client, True) segment_storage = RedisSegmentStorageAsync(redis_client) + rb_segment_storage = RedisRuleBasedSegmentsStorageAsync(redis_client) - await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][0]['name']), json.dumps(splits_json['splitChange1_1']['splits'][0])) - await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][1]['name']), json.dumps(splits_json['splitChange1_1']['splits'][1])) - await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][2]['name']), json.dumps(splits_json['splitChange1_1']['splits'][2])) + await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][0]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][0])) + await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][1]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][1])) + await 
redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][2]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][2])) await redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, -1) telemetry_redis_storage = await RedisTelemetryStorageAsync.create(redis_client, metadata) @@ -3673,6 +3792,7 @@ async def test_optimized(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': RedisImpressionsStorageAsync(redis_client, metadata), 'events': RedisEventsStorageAsync(redis_client, metadata), } @@ -3726,10 +3846,11 @@ async def test_debug(self): redis_client = await build_async(DEFAULT_CONFIG.copy()) split_storage = RedisSplitStorageAsync(redis_client, True) segment_storage = RedisSegmentStorageAsync(redis_client) + rb_segment_storage = RedisRuleBasedSegmentsStorageAsync(redis_client) - await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][0]['name']), json.dumps(splits_json['splitChange1_1']['splits'][0])) - await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][1]['name']), json.dumps(splits_json['splitChange1_1']['splits'][1])) - await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][2]['name']), json.dumps(splits_json['splitChange1_1']['splits'][2])) + await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][0]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][0])) + await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][1]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][1])) + await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][2]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][2])) await redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, -1) telemetry_redis_storage = await RedisTelemetryStorageAsync.create(redis_client, 
metadata) @@ -3740,6 +3861,7 @@ async def test_debug(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': RedisImpressionsStorageAsync(redis_client, metadata), 'events': RedisEventsStorageAsync(redis_client, metadata), } @@ -3793,10 +3915,11 @@ async def test_none(self): redis_client = await build_async(DEFAULT_CONFIG.copy()) split_storage = RedisSplitStorageAsync(redis_client, True) segment_storage = RedisSegmentStorageAsync(redis_client) + rb_segment_storage = RedisRuleBasedSegmentsStorageAsync(redis_client) - await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][0]['name']), json.dumps(splits_json['splitChange1_1']['splits'][0])) - await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][1]['name']), json.dumps(splits_json['splitChange1_1']['splits'][1])) - await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][2]['name']), json.dumps(splits_json['splitChange1_1']['splits'][2])) + await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][0]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][0])) + await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][1]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][1])) + await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][2]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][2])) await redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, -1) telemetry_redis_storage = await RedisTelemetryStorageAsync.create(redis_client, metadata) @@ -3807,6 +3930,7 @@ async def test_none(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': RedisImpressionsStorageAsync(redis_client, metadata), 'events': RedisEventsStorageAsync(redis_client, metadata), } @@ 
-3981,6 +4105,16 @@ async def _get_treatment_async(factory): if not isinstance(factory._recorder._impressions_manager._strategy, StrategyNoneMode): await _validate_last_impressions_async(client, ('regex_test', 'abc4', 'on')) + # test rule based segment matcher + assert await client.get_treatment('bilal@split.io', 'rbs_feature_flag', {'email': 'bilal@split.io'}) == 'on' + if not isinstance(factory._recorder._impressions_manager._strategy, StrategyNoneMode): + await _validate_last_impressions_async(client, ('rbs_feature_flag', 'bilal@split.io', 'on')) + + # test rule based segment matcher + assert await client.get_treatment('mauro@split.io', 'rbs_feature_flag', {'email': 'mauro@split.io'}) == 'off' + if not isinstance(factory._recorder._impressions_manager._strategy, StrategyNoneMode): + await _validate_last_impressions_async(client, ('rbs_feature_flag', 'mauro@split.io', 'off')) + async def _get_treatment_with_config_async(factory): """Test client.get_treatment_with_config().""" try: @@ -4265,5 +4399,5 @@ async def _manager_methods_async(factory): assert result.change_number == 123 assert result.configs['on'] == '{"size":15,"test":20}' - assert len(await manager.split_names()) == 7 - assert len(await manager.splits()) == 7 + assert len(await manager.split_names()) == 8 + assert len(await manager.splits()) == 8 diff --git a/tests/integration/test_pluggable_integration.py b/tests/integration/test_pluggable_integration.py index 844cde14..20545da5 100644 --- a/tests/integration/test_pluggable_integration.py +++ b/tests/integration/test_pluggable_integration.py @@ -23,12 +23,12 @@ def test_put_fetch(self): split_fn = os.path.join(os.path.dirname(__file__), 'files', 'split_changes.json') with open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: adapter.set(storage._prefix.format(feature_flag_name=split['name']), split) 
adapter.increment(storage._traffic_type_prefix.format(traffic_type_name=split['trafficTypeName']), 1) - adapter.set(storage._feature_flag_till_prefix, data['till']) + adapter.set(storage._feature_flag_till_prefix, data['ff']['t']) - split_objects = [splits.from_raw(raw) for raw in data['splits']] + split_objects = [splits.from_raw(raw) for raw in data['ff']['d']] for split_object in split_objects: raw = split_object.to_json() @@ -53,8 +53,8 @@ def test_put_fetch(self): assert len(original_condition.matchers) == len(fetched_condition.matchers) assert len(original_condition.partitions) == len(fetched_condition.partitions) - adapter.set(storage._feature_flag_till_prefix, data['till']) - assert storage.get_change_number() == data['till'] + adapter.set(storage._feature_flag_till_prefix, data['ff']['t']) + assert storage.get_change_number() == data['ff']['t'] assert storage.is_valid_traffic_type('user') is True assert storage.is_valid_traffic_type('account') is True @@ -89,12 +89,12 @@ def test_get_all(self): split_fn = os.path.join(os.path.dirname(__file__), 'files', 'split_changes.json') with open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: adapter.set(storage._prefix.format(feature_flag_name=split['name']), split) adapter.increment(storage._traffic_type_prefix.format(traffic_type_name=split['trafficTypeName']), 1) - adapter.set(storage._feature_flag_till_prefix, data['till']) + adapter.set(storage._feature_flag_till_prefix, data['ff']['t']) - split_objects = [splits.from_raw(raw) for raw in data['splits']] + split_objects = [splits.from_raw(raw) for raw in data['ff']['d']] original_splits = {split.name: split for split in split_objects} fetched_names = storage.get_split_names() fetched_splits = {split.name: split for split in storage.get_all_splits()} @@ -260,12 +260,12 @@ async def test_put_fetch(self): split_fn = os.path.join(os.path.dirname(__file__), 'files', 'split_changes.json') with 
open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: await adapter.set(storage._prefix.format(feature_flag_name=split['name']), split) await adapter.increment(storage._traffic_type_prefix.format(traffic_type_name=split['trafficTypeName']), 1) - await adapter.set(storage._feature_flag_till_prefix, data['till']) + await adapter.set(storage._feature_flag_till_prefix, data['ff']['t']) - split_objects = [splits.from_raw(raw) for raw in data['splits']] + split_objects = [splits.from_raw(raw) for raw in data['ff']['d']] for split_object in split_objects: raw = split_object.to_json() @@ -290,8 +290,8 @@ async def test_put_fetch(self): assert len(original_condition.matchers) == len(fetched_condition.matchers) assert len(original_condition.partitions) == len(fetched_condition.partitions) - await adapter.set(storage._feature_flag_till_prefix, data['till']) - assert await storage.get_change_number() == data['till'] + await adapter.set(storage._feature_flag_till_prefix, data['ff']['t']) + assert await storage.get_change_number() == data['ff']['t'] assert await storage.is_valid_traffic_type('user') is True assert await storage.is_valid_traffic_type('account') is True @@ -327,12 +327,12 @@ async def test_get_all(self): split_fn = os.path.join(os.path.dirname(__file__), 'files', 'split_changes.json') with open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: await adapter.set(storage._prefix.format(feature_flag_name=split['name']), split) await adapter.increment(storage._traffic_type_prefix.format(traffic_type_name=split['trafficTypeName']), 1) - await adapter.set(storage._feature_flag_till_prefix, data['till']) + await adapter.set(storage._feature_flag_till_prefix, data['ff']['t']) - split_objects = [splits.from_raw(raw) for raw in data['splits']] + split_objects = [splits.from_raw(raw) for raw in data['ff']['d']] original_splits = {split.name: 
split for split in split_objects} fetched_names = await storage.get_split_names() fetched_splits = {split.name: split for split in await storage.get_all_splits()} diff --git a/tests/integration/test_redis_integration.py b/tests/integration/test_redis_integration.py index e53ab4e2..4b70898b 100644 --- a/tests/integration/test_redis_integration.py +++ b/tests/integration/test_redis_integration.py @@ -28,7 +28,7 @@ def test_put_fetch(self): with open(os.path.join(os.path.dirname(__file__), 'files', 'split_changes.json'), 'r') as flo: split_changes = json.load(flo) - split_objects = [splits.from_raw(raw) for raw in split_changes['splits']] + split_objects = [splits.from_raw(raw) for raw in split_changes['ff']['d']] for split_object in split_objects: raw = split_object.to_json() adapter.set(RedisSplitStorage._FEATURE_FLAG_KEY.format(feature_flag_name=split_object.name), json.dumps(raw)) @@ -55,8 +55,8 @@ def test_put_fetch(self): assert len(original_condition.matchers) == len(fetched_condition.matchers) assert len(original_condition.partitions) == len(fetched_condition.partitions) - adapter.set(RedisSplitStorage._FEATURE_FLAG_TILL_KEY, split_changes['till']) - assert storage.get_change_number() == split_changes['till'] + adapter.set(RedisSplitStorage._FEATURE_FLAG_TILL_KEY, split_changes['ff']['t']) + assert storage.get_change_number() == split_changes['ff']['t'] assert storage.is_valid_traffic_type('user') is True assert storage.is_valid_traffic_type('account') is True @@ -93,7 +93,7 @@ def test_get_all(self): with open(os.path.join(os.path.dirname(__file__), 'files', 'split_changes.json'), 'r') as flo: split_changes = json.load(flo) - split_objects = [splits.from_raw(raw) for raw in split_changes['splits']] + split_objects = [splits.from_raw(raw) for raw in split_changes['ff']['d']] for split_object in split_objects: raw = split_object.to_json() adapter.set(RedisSplitStorage._FEATURE_FLAG_KEY.format(feature_flag_name=split_object.name), json.dumps(raw)) @@ -262,7 
+262,7 @@ async def test_put_fetch(self): with open(os.path.join(os.path.dirname(__file__), 'files', 'split_changes.json'), 'r') as flo: split_changes = json.load(flo) - split_objects = [splits.from_raw(raw) for raw in split_changes['splits']] + split_objects = [splits.from_raw(raw) for raw in split_changes['ff']['d']] for split_object in split_objects: raw = split_object.to_json() await adapter.set(RedisSplitStorage._FEATURE_FLAG_KEY.format(feature_flag_name=split_object.name), json.dumps(raw)) @@ -289,8 +289,8 @@ async def test_put_fetch(self): assert len(original_condition.matchers) == len(fetched_condition.matchers) assert len(original_condition.partitions) == len(fetched_condition.partitions) - await adapter.set(RedisSplitStorageAsync._FEATURE_FLAG_TILL_KEY, split_changes['till']) - assert await storage.get_change_number() == split_changes['till'] + await adapter.set(RedisSplitStorageAsync._FEATURE_FLAG_TILL_KEY, split_changes['ff']['t']) + assert await storage.get_change_number() == split_changes['ff']['t'] assert await storage.is_valid_traffic_type('user') is True assert await storage.is_valid_traffic_type('account') is True @@ -326,7 +326,7 @@ async def test_get_all(self): with open(os.path.join(os.path.dirname(__file__), 'files', 'split_changes.json'), 'r') as flo: split_changes = json.load(flo) - split_objects = [splits.from_raw(raw) for raw in split_changes['splits']] + split_objects = [splits.from_raw(raw) for raw in split_changes['ff']['d']] for split_object in split_objects: raw = split_object.to_json() await adapter.set(RedisSplitStorageAsync._FEATURE_FLAG_KEY.format(feature_flag_name=split_object.name), json.dumps(raw)) diff --git a/tests/integration/test_streaming_e2e.py b/tests/integration/test_streaming_e2e.py index a87ef59d..764475de 100644 --- a/tests/integration/test_streaming_e2e.py +++ b/tests/integration/test_streaming_e2e.py @@ -34,15 +34,17 @@ def test_happiness(self): } split_changes = { - -1: { - 'since': -1, - 'till': 1, - 'splits': 
[make_simple_split('split1', 1, True, False, 'on', 'user', True)] + -1: {'ff': { + 's': -1, + 't': 1, + 'd': [make_simple_split('split1', 1, True, False, 'on', 'user', True)]}, + 'rbs': {'s': -1, 't': -1, 'd': []} }, - 1: { - 'since': 1, - 'till': 1, - 'splits': [] + 1: {'ff': { + 's': 1, + 't': 1, + 'd': []}, + 'rbs': {'s': -1, 't': -1, 'd': []} } } @@ -76,22 +78,26 @@ def test_happiness(self): assert(factory._telemetry_evaluation_producer._telemetry_storage._streaming_events._streaming_events[len(factory._telemetry_evaluation_producer._telemetry_storage._streaming_events._streaming_events)-1]._type == StreamingEventTypes.SYNC_MODE_UPDATE.value) assert(factory._telemetry_evaluation_producer._telemetry_storage._streaming_events._streaming_events[len(factory._telemetry_evaluation_producer._telemetry_storage._streaming_events._streaming_events)-1]._data == SSESyncMode.STREAMING.value) split_changes[1] = { - 'since': 1, - 'till': 2, - 'splits': [make_simple_split('split1', 2, True, False, 'off', 'user', False)] + 'ff': { + 's': 1, + 't': 2, + 'd': [make_simple_split('split1', 2, True, False, 'off', 'user', False)]}, + 'rbs': {'s': -1, 't': -1, 'd': []} } - split_changes[2] = {'since': 2, 'till': 2, 'splits': []} + split_changes[2] = {'ff': {'s': 2, 't': 2, 'd': []}, 'rbs': {'s': -1, 't': -1, 'd': []}} sse_server.publish(make_split_change_event(2)) time.sleep(1) assert factory.client().get_treatment('maldo', 'split1') == 'off' split_changes[2] = { - 'since': 2, - 'till': 3, - 'splits': [make_split_with_segment('split2', 2, True, False, - 'off', 'user', 'off', 'segment1')] - } - split_changes[3] = {'since': 3, 'till': 3, 'splits': []} + 'ff': { + 's': 2, + 't': 3, + 'd': [make_split_with_segment('split2', 2, True, False, + 'off', 'user', 'off', 'segment1')]}, + 'rbs': {'s': -1, 't': -1, 'd': []} + } + split_changes[3] = {'ff': {'s': 3, 't': 3, 'd': []}, 'rbs': {'s': -1, 't': -1, 'd': []}} segment_changes[('segment1', -1)] = { 'name': 'segment1', 'added': ['maldo'], @@ 
-141,49 +147,49 @@ def test_happiness(self): # Initial splits fetch req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=-1' + assert req.path == '/api/splitChanges?s=1.3&since=-1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Auth req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/v2/auth?s=1.1' + assert req.path == '/api/v2/auth?s=1.3' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after streaming connected req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Fetch after first notification req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Fetch after second notification req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 
'GET' - assert req.path == '/api/splitChanges?s=1.1&since=3' + assert req.path == '/api/splitChanges?s=1.3&since=3&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Segment change notification @@ -222,12 +228,14 @@ def test_occupancy_flicker(self): } split_changes = { - -1: { - 'since': -1, - 'till': 1, - 'splits': [make_simple_split('split1', 1, True, False, 'off', 'user', True)] + -1: {'ff': { + 's': -1, + 't': 1, + 'd': [make_simple_split('split1', 1, True, False, 'off', 'user', True)]}, + 'rbs': {'s': -1, 't': -1, 'd': []} }, - 1: {'since': 1, 'till': 1, 'splits': []} + 1: {'ff': {'s': 1, 't': 1, 'd': []}, + 'rbs': {'s': -1, 't': -1, 'd': []}} } segment_changes = {} @@ -266,11 +274,12 @@ def test_occupancy_flicker(self): # After dropping occupancy, the sdk should switch to polling # and perform a syncAll that gets this change split_changes[1] = { - 'since': 1, - 'till': 2, - 'splits': [make_simple_split('split1', 2, True, False, 'off', 'user', False)] + 'ff': {'s': 1, + 't': 2, + 'd': [make_simple_split('split1', 2, True, False, 'off', 'user', False)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[2] = {'since': 2, 'till': 2, 'splits': []} + split_changes[2] = {'ff': {'s': 2, 't': 2, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_occupancy('control_pri', 0)) sse_server.publish(make_occupancy('control_sec', 0)) @@ -282,11 +291,12 @@ def test_occupancy_flicker(self): # We restore occupancy, and it should be fetched by the # sync all after streaming is restored. 
split_changes[2] = { - 'since': 2, - 'till': 3, - 'splits': [make_simple_split('split1', 3, True, False, 'off', 'user', True)] + 'ff': {'s': 2, + 't': 3, + 'd': [make_simple_split('split1', 3, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[3] = {'since': 3, 'till': 3, 'splits': []} + split_changes[3] = {'ff': {'s': 3, 't': 3, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_occupancy('control_pri', 1)) time.sleep(2) @@ -295,22 +305,24 @@ def test_occupancy_flicker(self): # Now we make another change and send an event so it's propagated split_changes[3] = { - 'since': 3, - 'till': 4, - 'splits': [make_simple_split('split1', 4, True, False, 'off', 'user', False)] + 'ff': {'s': 3, + 't': 4, + 'd': [make_simple_split('split1', 4, True, False, 'off', 'user', False)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[4] = {'since': 4, 'till': 4, 'splits': []} + split_changes[4] = {'ff': {'s': 4, 't': 4, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_split_change_event(4)) time.sleep(2) assert factory.client().get_treatment('maldo', 'split1') == 'off' # Kill the split split_changes[4] = { - 'since': 4, - 'till': 5, - 'splits': [make_simple_split('split1', 5, True, True, 'frula', 'user', False)] + 'ff': {'s': 4, + 't': 5, + 'd': [make_simple_split('split1', 5, True, True, 'frula', 'user', False)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[5] = {'since': 5, 'till': 5, 'splits': []} + split_changes[5] = {'ff': {'s': 5, 't': 5, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_split_kill_event('split1', 'frula', 5)) time.sleep(2) assert factory.client().get_treatment('maldo', 'split1') == 'frula' @@ -342,73 +354,73 @@ def test_occupancy_flicker(self): # Initial splits fetch req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=-1' + assert req.path == 
'/api/splitChanges?s=1.3&since=-1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Auth req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/v2/auth?s=1.1' + assert req.path == '/api/v2/auth?s=1.3' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after streaming connected req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Fetch after first notification req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Fetch after second notification req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=3' + assert req.path == '/api/splitChanges?s=1.3&since=3&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since 
== till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=3' + assert req.path == '/api/splitChanges?s=1.3&since=3&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=4' + assert req.path == '/api/splitChanges?s=1.3&since=4&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Split kill req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=4' + assert req.path == '/api/splitChanges?s=1.3&since=4&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=5' + assert req.path == '/api/splitChanges?s=1.3&since=5&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Cleanup @@ -435,12 +447,14 @@ def test_start_without_occupancy(self): } split_changes = { - -1: { - 'since': -1, - 'till': 1, - 'splits': [make_simple_split('split1', 1, True, False, 'off', 'user', True)] + -1: {'ff': { + 's': -1, + 't': 1, + 'd': [make_simple_split('split1', 1, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} }, - 1: {'since': 1, 'till': 1, 'splits': []} + 1: {'ff': {'s': 1, 't': 1, 'd': []}, + 'rbs': {'t': -1, 's': -1, 'd': []}} } segment_changes = {} @@ -478,11 +492,13 @@ def test_start_without_occupancy(self): # After restoring occupancy, the sdk should switch to polling # and perform a syncAll that gets this change split_changes[1] = { - 'since': 1, - 'till': 2, - 'splits': [make_simple_split('split1', 2, True, False, 'off', 'user', False)] + 'ff': {'s': 1, + 't': 2, + 'd': [make_simple_split('split1', 2, True, False, 'off', 'user', False)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } 
- split_changes[2] = {'since': 2, 'till': 2, 'splits': []} + split_changes[2] = {'ff': {'s': 2, 't': 2, 'd': []}, + 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_occupancy('control_sec', 1)) time.sleep(2) @@ -516,43 +532,43 @@ def test_start_without_occupancy(self): # Initial splits fetch req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=-1' + assert req.path == '/api/splitChanges?s=1.3&since=-1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Auth req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/v2/auth?s=1.1' + assert req.path == '/api/v2/auth?s=1.3' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after streaming connected req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after push down req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after push restored req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Second iteration of previous syncAll req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == 
'/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Cleanup @@ -562,7 +578,7 @@ def test_start_without_occupancy(self): sse_server.publish(sse_server.GRACEFUL_REQUEST_END) sse_server.stop() split_backend.stop() - + def test_streaming_status_changes(self): """Test changes between streaming enabled, paused and disabled.""" auth_server_response = { @@ -579,12 +595,14 @@ def test_streaming_status_changes(self): } split_changes = { - -1: { - 'since': -1, - 'till': 1, - 'splits': [make_simple_split('split1', 1, True, False, 'off', 'user', True)] + -1: {'ff': { + 's': -1, + 't': 1, + 'd': [make_simple_split('split1', 1, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} }, - 1: {'since': 1, 'till': 1, 'splits': []} + 1: {'ff': {'s': 1, 't': 1, 'd': []}, + 'rbs': {'t': -1, 's': -1, 'd': []}} } segment_changes = {} @@ -623,11 +641,12 @@ def test_streaming_status_changes(self): # After dropping occupancy, the sdk should switch to polling # and perform a syncAll that gets this change split_changes[1] = { - 'since': 1, - 'till': 2, - 'splits': [make_simple_split('split1', 2, True, False, 'off', 'user', False)] + 'ff': {'s': 1, + 't': 2, + 'd': [make_simple_split('split1', 2, True, False, 'off', 'user', False)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[2] = {'since': 2, 'till': 2, 'splits': []} + split_changes[2] = {'ff': {'s': 2, 't': 2, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_control_event('STREAMING_PAUSED', 1)) time.sleep(2) @@ -638,11 +657,12 @@ def test_streaming_status_changes(self): # We restore occupancy, and it should be fetched by the # sync all after streaming is restored. 
split_changes[2] = { - 'since': 2, - 'till': 3, - 'splits': [make_simple_split('split1', 3, True, False, 'off', 'user', True)] + 'ff': {'s': 2, + 't': 3, + 'd': [make_simple_split('split1', 3, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[3] = {'since': 3, 'till': 3, 'splits': []} + split_changes[3] = {'ff': {'s': 3, 't': 3, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_control_event('STREAMING_ENABLED', 2)) time.sleep(2) @@ -651,22 +671,26 @@ def test_streaming_status_changes(self): # Now we make another change and send an event so it's propagated split_changes[3] = { - 'since': 3, - 'till': 4, - 'splits': [make_simple_split('split1', 4, True, False, 'off', 'user', False)] + 'ff': {'s': 3, + 't': 4, + 'd': [make_simple_split('split1', 4, True, False, 'off', 'user', False)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[4] = {'since': 4, 'till': 4, 'splits': []} + split_changes[4] = {'ff': {'s': 4, 't': 4, 'd': []}, + 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_split_change_event(4)) time.sleep(2) assert factory.client().get_treatment('maldo', 'split1') == 'off' assert not task.running() split_changes[4] = { - 'since': 4, - 'till': 5, - 'splits': [make_simple_split('split1', 5, True, False, 'off', 'user', True)] + 'ff': {'s': 4, + 't': 5, + 'd': [make_simple_split('split1', 5, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[5] = {'since': 5, 'till': 5, 'splits': []} + split_changes[5] = {'ff': {'s': 5, 't': 5, 'd': []}, + 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_control_event('STREAMING_DISABLED', 2)) time.sleep(2) assert factory.client().get_treatment('maldo', 'split1') == 'on' @@ -700,73 +724,73 @@ def test_streaming_status_changes(self): # Initial splits fetch req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=-1' + assert req.path == 
'/api/splitChanges?s=1.3&since=-1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Auth req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/v2/auth?s=1.1' + assert req.path == '/api/v2/auth?s=1.3' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after streaming connected req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll on push down req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after push is up req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=3' + assert req.path == '/api/splitChanges?s=1.3&since=3&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Fetch after notification req = 
split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=3' + assert req.path == '/api/splitChanges?s=1.3&since=3&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=4' + assert req.path == '/api/splitChanges?s=1.3&since=4&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after streaming disabled req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=4' + assert req.path == '/api/splitChanges?s=1.3&since=4&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=5' + assert req.path == '/api/splitChanges?s=1.3&since=5&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Cleanup @@ -793,15 +817,17 @@ def test_server_closes_connection(self): } split_changes = { - -1: { - 'since': -1, - 'till': 1, - 'splits': [make_simple_split('split1', 1, True, False, 'on', 'user', True)] + -1: {'ff': { + 's': -1, + 't': 1, + 'd': [make_simple_split('split1', 1, True, False, 'on', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} }, - 1: { - 'since': 1, - 'till': 1, - 'splits': [] + 1: {'ff': { + 's': 1, + 't': 1, + 'd': []}, + 'rbs': {'t': -1, 's': -1, 'd': []} } } @@ -836,12 +862,14 @@ def test_server_closes_connection(self): assert not task.running() time.sleep(1) - split_changes[1] = { - 'since': 1, - 'till': 2, - 'splits': [make_simple_split('split1', 2, True, False, 'off', 'user', False)] - } - split_changes[2] = {'since': 2, 'till': 2, 'splits': []} + split_changes[1] = {'ff': { + 's': 1, + 't': 2, + 'd': [make_simple_split('split1', 2, True, False, 'off', 'user', False)]}, + 'rbs': {'t': 
-1, 's': -1, 'd': []} + } + split_changes[2] = {'ff': {'s': 2, 't': 2, 'd': []}, + 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_split_change_event(2)) time.sleep(1) assert factory.client().get_treatment('maldo', 'split1') == 'off' @@ -860,12 +888,14 @@ def test_server_closes_connection(self): time.sleep(2) assert not task.running() - split_changes[2] = { - 'since': 2, - 'till': 3, - 'splits': [make_simple_split('split1', 3, True, False, 'off', 'user', True)] - } - split_changes[3] = {'since': 3, 'till': 3, 'splits': []} + split_changes[2] = {'ff': { + 's': 2, + 't': 3, + 'd': [make_simple_split('split1', 3, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} + } + split_changes[3] = {'ff': {'s': 3, 't': 3, 'd': [], + 'rbs': {'t': -1, 's': -1, 'd': []}}} sse_server.publish(make_split_change_event(3)) time.sleep(1) assert factory.client().get_treatment('maldo', 'split1') == 'on' @@ -921,67 +951,67 @@ def test_server_closes_connection(self): # Initial splits fetch req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=-1' + assert req.path == '/api/splitChanges?s=1.3&since=-1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Auth req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/v2/auth?s=1.1' + assert req.path == '/api/v2/auth?s=1.3' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after streaming connected req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert 
req.headers['authorization'] == 'Bearer some_apikey' # Fetch after first notification req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll on retryable error handling req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Auth after connection breaks req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/v2/auth?s=1.1' + assert req.path == '/api/v2/auth?s=1.3' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after streaming connected again req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Fetch after new notification req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=3' + assert req.path == '/api/splitChanges?s=1.3&since=3&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Cleanup @@ -1015,12 +1045,14 @@ def 
test_ably_errors_handling(self): } split_changes = { - -1: { - 'since': -1, - 'till': 1, - 'splits': [make_simple_split('split1', 1, True, False, 'off', 'user', True)] + -1: {'ff': { + 's': -1, + 't': 1, + 'd': [make_simple_split('split1', 1, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} }, - 1: {'since': 1, 'till': 1, 'splits': []} + 1: {'ff': {'s': 1, 't': 1, 'd': []}, + 'rbs': {'t': -1, 's': -1, 'd': []}} } segment_changes = {} @@ -1057,12 +1089,14 @@ def test_ably_errors_handling(self): # Make a change in the BE but don't send the event. # We'll send an ignorable error and check it has nothing happened - split_changes[1] = { - 'since': 1, - 'till': 2, - 'splits': [make_simple_split('split1', 2, True, False, 'off', 'user', False)] + split_changes[1] = {'ff': { + 's': 1, + 't': 2, + 'd': [make_simple_split('split1', 2, True, False, 'off', 'user', False)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[2] = {'since': 2, 'till': 2, 'splits': []} + split_changes[2] = {'ff': {'s': 2, 't': 2, 'd': []}, + 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_ably_error_event(60000, 600)) time.sleep(1) @@ -1083,12 +1117,14 @@ def test_ably_errors_handling(self): assert not task.running() # Assert streaming is working properly - split_changes[2] = { - 'since': 2, - 'till': 3, - 'splits': [make_simple_split('split1', 3, True, False, 'off', 'user', True)] - } - split_changes[3] = {'since': 3, 'till': 3, 'splits': []} + split_changes[2] = {'ff': { + 's': 2, + 't': 3, + 'd': [make_simple_split('split1', 3, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} + } + split_changes[3] = {'ff': {'s': 3, 't': 3, 'd': []}, + 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_split_change_event(3)) time.sleep(2) assert factory.client().get_treatment('maldo', 'split1') == 'on' @@ -1152,67 +1188,67 @@ def test_ably_errors_handling(self): # Initial splits fetch req = split_backend_requests.get() assert req.method 
== 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=-1' + assert req.path == '/api/splitChanges?s=1.3&since=-1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Auth req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/v2/auth?s=1.1' + assert req.path == '/api/v2/auth?s=1.3' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after streaming connected req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll retriable error req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Auth again req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/v2/auth?s=1.1' + assert req.path == '/api/v2/auth?s=1.3' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after push is up req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Fetch 
after notification req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=3' + assert req.path == '/api/splitChanges?s=1.3&since=3&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after non recoverable ably error req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=3' + assert req.path == '/api/splitChanges?s=1.3&since=3&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Cleanup @@ -1239,12 +1275,14 @@ def test_change_number(mocker): } split_changes = { - -1: { - 'since': -1, - 'till': 1, - 'splits': [make_simple_split('split1', 1, True, False, 'off', 'user', True)] + -1: {'ff': { + 's': -1, + 't': 1, + 'd': [make_simple_split('split1', 1, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} }, - 1: {'since': 1, 'till': 1, 'splits': []} + 1: {'ff': {'s': 1, 't': 1, 'd': []}, + 'rbs': {'t': -1, 's': -1, 'd': []}} } segment_changes = {} @@ -1312,15 +1350,17 @@ async def test_happiness(self): } split_changes = { - -1: { - 'since': -1, - 'till': 1, - 'splits': [make_simple_split('split1', 1, True, False, 'on', 'user', True)] + -1: {'ff': { + 's': -1, + 't': 1, + 'd': [make_simple_split('split1', 1, True, False, 'on', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} }, - 1: { - 'since': 1, - 'till': 1, - 'splits': [] + 1: {'ff': { + 's': 1, + 't': 1, + 'd': []}, + 'rbs': {'t': -1, 's': -1, 'd': []} } } @@ -1353,23 +1393,27 @@ async def test_happiness(self): await asyncio.sleep(1) 
assert(factory._telemetry_evaluation_producer._telemetry_storage._streaming_events._streaming_events[len(factory._telemetry_evaluation_producer._telemetry_storage._streaming_events._streaming_events)-1]._type == StreamingEventTypes.SYNC_MODE_UPDATE.value) assert(factory._telemetry_evaluation_producer._telemetry_storage._streaming_events._streaming_events[len(factory._telemetry_evaluation_producer._telemetry_storage._streaming_events._streaming_events)-1]._data == SSESyncMode.STREAMING.value) - split_changes[1] = { - 'since': 1, - 'till': 2, - 'splits': [make_simple_split('split1', 2, True, False, 'off', 'user', False)] - } - split_changes[2] = {'since': 2, 'till': 2, 'splits': []} + split_changes[1] = {'ff': { + 's': 1, + 't': 2, + 'd': [make_simple_split('split1', 2, True, False, 'off', 'user', False)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} + } + split_changes[2] = {'ff': {'s': 2, 't': 2, 'd': []}, + 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_split_change_event(2)) await asyncio.sleep(1) assert await factory.client().get_treatment('maldo', 'split1') == 'off' - split_changes[2] = { - 'since': 2, - 'till': 3, - 'splits': [make_split_with_segment('split2', 2, True, False, - 'off', 'user', 'off', 'segment1')] + split_changes[2] = {'ff': { + 's': 2, + 't': 3, + 'd': [make_split_with_segment('split2', 2, True, False, + 'off', 'user', 'off', 'segment1')]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[3] = {'since': 3, 'till': 3, 'splits': []} + split_changes[3] = {'ff': {'s': 3, 't': 3, 'd': []}, + 'rbs': {'t': -1, 's': -1, 'd': []}} segment_changes[('segment1', -1)] = { 'name': 'segment1', 'added': ['maldo'], @@ -1415,49 +1459,49 @@ async def test_happiness(self): # Initial splits fetch req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=-1' + assert req.path == '/api/splitChanges?s=1.3&since=-1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration 
until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Auth req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/v2/auth?s=1.1' + assert req.path == '/api/v2/auth?s=1.3' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after streaming connected req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Fetch after first notification req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Fetch after second notification req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=3' + assert req.path == '/api/splitChanges?s=1.3&since=3&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Segment change notification @@ -1495,12 +1539,14 @@ async def test_occupancy_flicker(self): } split_changes = { - -1: { - 'since': -1, - 
'till': 1, - 'splits': [make_simple_split('split1', 1, True, False, 'off', 'user', True)] + -1: {'ff': { + 's': -1, + 't': 1, + 'd': [make_simple_split('split1', 1, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} }, - 1: {'since': 1, 'till': 1, 'splits': []} + 1: {'ff': {'s': 1, 't': 1, 'd': []}, + 'rbs': {'t': -1, 's': -1, 'd': []}} } segment_changes = {} @@ -1538,13 +1584,13 @@ async def test_occupancy_flicker(self): # Make a change in the BE but don't send the event. # After dropping occupancy, the sdk should switch to polling # and perform a syncAll that gets this change - split_changes[1] = { - 'since': 1, - 'till': 2, - 'splits': [make_simple_split('split1', 2, True, False, 'off', 'user', False)] + split_changes[1] = {'ff': { + 's': 1, + 't': 2, + 'd': [make_simple_split('split1', 2, True, False, 'off', 'user', False)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[2] = {'since': 2, 'till': 2, 'splits': []} - + split_changes[2] = {'ff': {'s': 2, 't': 2, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_occupancy('control_pri', 0)) sse_server.publish(make_occupancy('control_sec', 0)) await asyncio.sleep(2) @@ -1554,36 +1600,38 @@ async def test_occupancy_flicker(self): # We make another chagne in the BE and don't send the event. # We restore occupancy, and it should be fetched by the # sync all after streaming is restored. 
- split_changes[2] = { - 'since': 2, - 'till': 3, - 'splits': [make_simple_split('split1', 3, True, False, 'off', 'user', True)] + split_changes[2] = {'ff': { + 's': 2, + 't': 3, + 'd': [make_simple_split('split1', 3, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[3] = {'since': 3, 'till': 3, 'splits': []} - + split_changes[3] = {'ff': {'s': 3, 't': 3, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_occupancy('control_pri', 1)) await asyncio.sleep(2) assert await factory.client().get_treatment('maldo', 'split1') == 'on' assert not task.running() # Now we make another change and send an event so it's propagated - split_changes[3] = { - 'since': 3, - 'till': 4, - 'splits': [make_simple_split('split1', 4, True, False, 'off', 'user', False)] + split_changes[3] = {'ff': { + 's': 3, + 't': 4, + 'd': [make_simple_split('split1', 4, True, False, 'off', 'user', False)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[4] = {'since': 4, 'till': 4, 'splits': []} + split_changes[4] = {'ff': {'s': 4, 't': 4, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_split_change_event(4)) await asyncio.sleep(2) assert await factory.client().get_treatment('maldo', 'split1') == 'off' # Kill the split - split_changes[4] = { - 'since': 4, - 'till': 5, - 'splits': [make_simple_split('split1', 5, True, True, 'frula', 'user', False)] + split_changes[4] = {'ff': { + 's': 4, + 't': 5, + 'd': [make_simple_split('split1', 5, True, True, 'frula', 'user', False)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[5] = {'since': 5, 'till': 5, 'splits': []} + split_changes[5] = {'ff': {'s': 5, 't': 5, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_split_kill_event('split1', 'frula', 5)) await asyncio.sleep(2) assert await factory.client().get_treatment('maldo', 'split1') == 'frula' @@ -1615,73 +1663,73 @@ async def test_occupancy_flicker(self): # Initial splits fetch req = 
split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=-1' + assert req.path == '/api/splitChanges?s=1.3&since=-1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Auth req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/v2/auth?s=1.1' + assert req.path == '/api/v2/auth?s=1.3' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after streaming connected req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Fetch after first notification req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Fetch after second notification req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=3' + assert 
req.path == '/api/splitChanges?s=1.3&since=3&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=3' + assert req.path == '/api/splitChanges?s=1.3&since=3&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=4' + assert req.path == '/api/splitChanges?s=1.3&since=4&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Split kill req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=4' + assert req.path == '/api/splitChanges?s=1.3&since=4&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=5' + assert req.path == '/api/splitChanges?s=1.3&since=5&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Cleanup @@ -1707,12 +1755,13 @@ async def test_start_without_occupancy(self): } split_changes = { - -1: { - 'since': -1, - 'till': 1, - 'splits': [make_simple_split('split1', 1, True, False, 'off', 'user', True)] + -1: {'ff': { + 's': -1, + 't': 1, + 'd': [make_simple_split('split1', 1, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} }, - 1: {'since': 1, 'till': 1, 'splits': []} + 1: {'ff': {'s': 1, 't': 1, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} } segment_changes = {} @@ -1752,12 +1801,13 @@ async def test_start_without_occupancy(self): # Make a change in the BE but don't send the event. 
# After restoring occupancy, the sdk should switch to polling # and perform a syncAll that gets this change - split_changes[1] = { - 'since': 1, - 'till': 2, - 'splits': [make_simple_split('split1', 2, True, False, 'off', 'user', False)] + split_changes[1] = {'ff': { + 's': 1, + 't': 2, + 'd': [make_simple_split('split1', 2, True, False, 'off', 'user', False)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[2] = {'since': 2, 'till': 2, 'splits': []} + split_changes[2] = {'ff': {'s': 2, 't': 2, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_occupancy('control_sec', 1)) await asyncio.sleep(2) @@ -1791,43 +1841,43 @@ async def test_start_without_occupancy(self): # Initial splits fetch req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=-1' + assert req.path == '/api/splitChanges?s=1.3&since=-1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Auth req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/v2/auth?s=1.1' + assert req.path == '/api/v2/auth?s=1.3' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after streaming connected req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after push down req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer 
some_apikey' # SyncAll after push restored req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Second iteration of previous syncAll req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Cleanup @@ -1853,12 +1903,13 @@ async def test_streaming_status_changes(self): } split_changes = { - -1: { - 'since': -1, - 'till': 1, - 'splits': [make_simple_split('split1', 1, True, False, 'off', 'user', True)] + -1: {'ff': { + 's': -1, + 't': 1, + 'd': [make_simple_split('split1', 1, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} }, - 1: {'since': 1, 'till': 1, 'splits': []} + 1: {'ff': {'s': 1, 't': 1, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} } segment_changes = {} @@ -1899,12 +1950,13 @@ async def test_streaming_status_changes(self): # Make a change in the BE but don't send the event. # After dropping occupancy, the sdk should switch to polling # and perform a syncAll that gets this change - split_changes[1] = { - 'since': 1, - 'till': 2, - 'splits': [make_simple_split('split1', 2, True, False, 'off', 'user', False)] + split_changes[1] = {'ff': { + 's': 1, + 't': 2, + 'd': [make_simple_split('split1', 2, True, False, 'off', 'user', False)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[2] = {'since': 2, 'till': 2, 'splits': []} + split_changes[2] = {'ff': {'s': 2, 't': 2, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_control_event('STREAMING_PAUSED', 1)) await asyncio.sleep(4) @@ -1915,12 +1967,13 @@ async def test_streaming_status_changes(self): # We make another chagne in the BE and don't send the event. 
# We restore occupancy, and it should be fetched by the # sync all after streaming is restored. - split_changes[2] = { - 'since': 2, - 'till': 3, - 'splits': [make_simple_split('split1', 3, True, False, 'off', 'user', True)] + split_changes[2] = {'ff': { + 's': 2, + 't': 3, + 'd': [make_simple_split('split1', 3, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[3] = {'since': 3, 'till': 3, 'splits': []} + split_changes[3] = {'ff': {'s': 3, 't': 3, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_control_event('STREAMING_ENABLED', 2)) await asyncio.sleep(2) @@ -1929,24 +1982,26 @@ async def test_streaming_status_changes(self): assert not task.running() # Now we make another change and send an event so it's propagated - split_changes[3] = { - 'since': 3, - 'till': 4, - 'splits': [make_simple_split('split1', 4, True, False, 'off', 'user', False)] + split_changes[3] = {'ff': { + 's': 3, + 't': 4, + 'd': [make_simple_split('split1', 4, True, False, 'off', 'user', False)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[4] = {'since': 4, 'till': 4, 'splits': []} + split_changes[4] = {'ff': {'s': 4, 't': 4, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_split_change_event(4)) await asyncio.sleep(2) assert await factory.client().get_treatment('maldo', 'split1') == 'off' assert not task.running() - split_changes[4] = { - 'since': 4, - 'till': 5, - 'splits': [make_simple_split('split1', 5, True, False, 'off', 'user', True)] + split_changes[4] = {'ff': { + 's': 4, + 't': 5, + 'd': [make_simple_split('split1', 5, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[5] = {'since': 5, 'till': 5, 'splits': []} + split_changes[5] = {'ff': {'s': 5, 't': 5, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_control_event('STREAMING_DISABLED', 2)) await asyncio.sleep(2) @@ -1980,73 +2035,73 @@ async def 
test_streaming_status_changes(self): # Initial splits fetch req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=-1' + assert req.path == '/api/splitChanges?s=1.3&since=-1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Auth req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/v2/auth?s=1.1' + assert req.path == '/api/v2/auth?s=1.3' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after streaming connected req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll on push down req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after push is up req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == 
'/api/splitChanges?s=1.1&since=3' + assert req.path == '/api/splitChanges?s=1.3&since=3&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Fetch after notification req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=3' + assert req.path == '/api/splitChanges?s=1.3&since=3&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=4' + assert req.path == '/api/splitChanges?s=1.3&since=4&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after streaming disabled req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=4' + assert req.path == '/api/splitChanges?s=1.3&since=4&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=5' + assert req.path == '/api/splitChanges?s=1.3&since=5&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Cleanup @@ -2072,16 +2127,13 @@ async def test_server_closes_connection(self): } split_changes = { - -1: { - 'since': -1, - 'till': 1, - 'splits': [make_simple_split('split1', 1, True, False, 'on', 'user', True)] + -1: {'ff': { + 's': -1, + 't': 1, + 'd': [make_simple_split('split1', 1, True, False, 'on', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} }, - 1: { - 'since': 1, - 'till': 1, - 'splits': [] - } + 1: {'ff': {'s': 1, 't': 1, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} } segment_changes = {} @@ -2114,12 +2166,13 @@ async def test_server_closes_connection(self): assert not task.running() await asyncio.sleep(1) - split_changes[1] = { - 'since': 1, - 'till': 2, - 'splits': [make_simple_split('split1', 2, True, 
False, 'off', 'user', False)] + split_changes[1] = {'ff': { + 's': 1, + 't': 2, + 'd': [make_simple_split('split1', 2, True, False, 'off', 'user', False)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[2] = {'since': 2, 'till': 2, 'splits': []} + split_changes[2] = {'ff': {'s': 2, 't': 2, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_split_change_event(2)) await asyncio.sleep(1) assert await factory.client().get_treatment('maldo', 'split1') == 'off' @@ -2139,12 +2192,13 @@ async def test_server_closes_connection(self): await asyncio.sleep(2) assert not task.running() - split_changes[2] = { - 'since': 2, - 'till': 3, - 'splits': [make_simple_split('split1', 3, True, False, 'off', 'user', True)] + split_changes[2] = {'ff': { + 's': 2, + 't': 3, + 'd': [make_simple_split('split1', 3, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[3] = {'since': 3, 'till': 3, 'splits': []} + split_changes[3] = {'ff': {'s': 3, 't': 3, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_split_change_event(3)) await asyncio.sleep(1) @@ -2201,67 +2255,67 @@ async def test_server_closes_connection(self): # Initial splits fetch req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=-1' + assert req.path == '/api/splitChanges?s=1.3&since=-1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Auth req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/v2/auth?s=1.1' + assert req.path == '/api/v2/auth?s=1.3' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after streaming connected req = 
split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Fetch after first notification req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll on retryable error handling req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Auth after connection breaks req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/v2/auth?s=1.1' + assert req.path == '/api/v2/auth?s=1.3' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after streaming connected again req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Fetch after new notification req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == 
'/api/splitChanges?s=1.1&since=3' + assert req.path == '/api/splitChanges?s=1.3&since=3&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Cleanup @@ -2294,12 +2348,13 @@ async def test_ably_errors_handling(self): } split_changes = { - -1: { - 'since': -1, - 'till': 1, - 'splits': [make_simple_split('split1', 1, True, False, 'off', 'user', True)] + -1: {'ff': { + 's': -1, + 't': 1, + 'd': [make_simple_split('split1', 1, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} }, - 1: {'since': 1, 'till': 1, 'splits': []} + 1: {'ff': {'s': 1, 't': 1, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} } segment_changes = {} @@ -2338,12 +2393,13 @@ async def test_ably_errors_handling(self): # Make a change in the BE but don't send the event. # We'll send an ignorable error and check it has nothing happened - split_changes[1] = { - 'since': 1, - 'till': 2, - 'splits': [make_simple_split('split1', 2, True, False, 'off', 'user', False)] + split_changes[1] = {'ff': { + 's': 1, + 't': 2, + 'd': [make_simple_split('split1', 2, True, False, 'off', 'user', False)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[2] = {'since': 2, 'till': 2, 'splits': []} + split_changes[2] = {'ff': {'s': 2, 't': 2, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_ably_error_event(60000, 600)) await asyncio.sleep(1) @@ -2366,12 +2422,13 @@ async def test_ably_errors_handling(self): assert not task.running() # Assert streaming is working properly - split_changes[2] = { - 'since': 2, - 'till': 3, - 'splits': [make_simple_split('split1', 3, True, False, 'off', 'user', True)] + split_changes[2] = {'ff': { + 's': 2, + 't': 3, + 'd': [make_simple_split('split1', 3, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[3] = {'since': 3, 'till': 3, 'splits': []} + split_changes[3] = {'ff': {'s': 3, 't': 3, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_split_change_event(3)) 
await asyncio.sleep(2) assert await factory.client().get_treatment('maldo', 'split1') == 'on' @@ -2434,67 +2491,67 @@ async def test_ably_errors_handling(self): # Initial splits fetch req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=-1' + assert req.path == '/api/splitChanges?s=1.3&since=-1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Auth req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/v2/auth?s=1.1' + assert req.path == '/api/v2/auth?s=1.3' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after streaming connected req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll retriable error req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Auth again req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/v2/auth?s=1.1' + assert req.path == '/api/v2/auth?s=1.3' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after push is up req = 
split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Fetch after notification req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=3' + assert req.path == '/api/splitChanges?s=1.3&since=3&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after non recoverable ably error req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=3' + assert req.path == '/api/splitChanges?s=1.3&since=3&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Cleanup @@ -2520,12 +2577,13 @@ async def test_change_number(mocker): } split_changes = { - -1: { - 'since': -1, - 'till': 1, - 'splits': [make_simple_split('split1', 1, True, False, 'off', 'user', True)] + -1: {'ff': { + 's': -1, + 't': 1, + 'd': [make_simple_split('split1', 1, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} }, - 1: {'since': 1, 'till': 1, 'splits': []} + 1: {'ff': {'s': 1, 't': 1, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} } segment_changes = {} diff --git a/tests/models/grammar/test_matchers.py b/tests/models/grammar/test_matchers.py index bf582917..12de99e8 100644 --- a/tests/models/grammar/test_matchers.py +++ b/tests/models/grammar/test_matchers.py @@ -404,9 +404,9 @@ def test_matcher_behaviour(self, mocker): matcher = matchers.UserDefinedSegmentMatcher(self.raw) # Test that if the key if the storage wrapper finds the key in the segment, it matches. 
- assert matcher.evaluate('some_key', {}, {'evaluator': None, 'ec': EvaluationContext([],{'some_segment': True})}) is True + assert matcher.evaluate('some_key', {}, {'evaluator': None, 'ec': EvaluationContext([],{'some_segment': True}, {}, {})}) is True # Test that if the key if the storage wrapper doesn't find the key in the segment, it fails. - assert matcher.evaluate('some_key', {}, {'evaluator': None, 'ec': EvaluationContext([], {'some_segment': False})}) is False + assert matcher.evaluate('some_key', {}, {'evaluator': None, 'ec': EvaluationContext([], {'some_segment': False}, {}, {})}) is False def test_to_json(self): """Test that the object serializes to JSON properly.""" @@ -778,8 +778,8 @@ def test_matcher_behaviour(self, mocker): parsed = matchers.DependencyMatcher(cond_raw) evaluator = mocker.Mock(spec=Evaluator) - cond = condition.from_raw(splits_json["splitChange1_1"]["splits"][0]['conditions'][0]) - split = splits.from_raw(splits_json["splitChange1_1"]["splits"][0]) + cond = condition.from_raw(splits_json["splitChange1_1"]['ff']['d'][0]['conditions'][0]) + split = splits.from_raw(splits_json["splitChange1_1"]['ff']['d'][0]) evaluator.eval_with_context.return_value = {'treatment': 'on'} assert parsed.evaluate('SPLIT_2', {}, {'evaluator': evaluator, 'ec': [{'flags': [split], 'segment_memberships': {}}]}) is True diff --git a/tests/push/test_parser.py b/tests/push/test_parser.py index 6f4b57ff..faffb3d0 100644 --- a/tests/push/test_parser.py +++ b/tests/push/test_parser.py @@ -66,7 +66,7 @@ def test_event_parsing(self): assert parsed1.change_number == 1591996685190 assert parsed1.previous_change_number == 12 assert parsed1.compression == 2 - assert parsed1.feature_flag_definition == 'eJzEUtFu2kAQ/BU0z4d0hw2Be0MFRVGJIx' + assert parsed1.object_definition == 'eJzEUtFu2kAQ/BU0z4d0hw2Be0MFRVGJIx' e1 = make_message( 'NDA5ODc2MTAyNg==_MzAyODY0NDkyOA==_splits', @@ -77,7 +77,7 @@ def test_event_parsing(self): assert parsed1.change_number == 1591996685190 assert 
parsed1.previous_change_number == None assert parsed1.compression == None - assert parsed1.feature_flag_definition == None + assert parsed1.object_definition == None e2 = make_message( 'NDA5ODc2MTAyNg==_MzAyODY0NDkyOA==_segments', diff --git a/tests/storage/test_pluggable.py b/tests/storage/test_pluggable.py index 953a4510..a290d721 100644 --- a/tests/storage/test_pluggable.py +++ b/tests/storage/test_pluggable.py @@ -275,19 +275,19 @@ def test_get(self): for sprefix in [None, 'myprefix']: pluggable_split_storage = PluggableSplitStorage(self.mock_adapter, prefix=sprefix) - split1 = splits.from_raw(splits_json['splitChange1_2']['splits'][0]) - split_name = splits_json['splitChange1_2']['splits'][0]['name'] + split1 = splits.from_raw(splits_json['splitChange1_2']['ff']['d'][0]) + split_name = splits_json['splitChange1_2']['ff']['d'][0]['name'] self.mock_adapter.set(pluggable_split_storage._prefix.format(feature_flag_name=split_name), split1.to_json()) - assert(pluggable_split_storage.get(split_name).to_json() == splits.from_raw(splits_json['splitChange1_2']['splits'][0]).to_json()) + assert(pluggable_split_storage.get(split_name).to_json() == splits.from_raw(splits_json['splitChange1_2']['ff']['d'][0]).to_json()) assert(pluggable_split_storage.get('not_existing') == None) def test_fetch_many(self): self.mock_adapter._keys = {} for sprefix in [None, 'myprefix']: pluggable_split_storage = PluggableSplitStorage(self.mock_adapter, prefix=sprefix) - split1 = splits.from_raw(splits_json['splitChange1_2']['splits'][0]) - split2_temp = splits_json['splitChange1_2']['splits'][0].copy() + split1 = splits.from_raw(splits_json['splitChange1_2']['ff']['d'][0]) + split2_temp = splits_json['splitChange1_2']['ff']['d'][0].copy() split2_temp['name'] = 'another_split' split2 = splits.from_raw(split2_temp) @@ -326,8 +326,8 @@ def test_get_split_names(self): self.mock_adapter._keys = {} for sprefix in [None, 'myprefix']: pluggable_split_storage = PluggableSplitStorage(self.mock_adapter, 
prefix=sprefix) - split1 = splits.from_raw(splits_json['splitChange1_2']['splits'][0]) - split2_temp = splits_json['splitChange1_2']['splits'][0].copy() + split1 = splits.from_raw(splits_json['splitChange1_2']['ff']['d'][0]) + split2_temp = splits_json['splitChange1_2']['ff']['d'][0].copy() split2_temp['name'] = 'another_split' split2 = splits.from_raw(split2_temp) self.mock_adapter.set(pluggable_split_storage._prefix.format(feature_flag_name=split1.name), split1.to_json()) @@ -411,12 +411,12 @@ async def test_get(self): for sprefix in [None, 'myprefix']: pluggable_split_storage = PluggableSplitStorageAsync(self.mock_adapter, prefix=sprefix) - split1 = splits.from_raw(splits_json['splitChange1_2']['splits'][0]) - split_name = splits_json['splitChange1_2']['splits'][0]['name'] + split1 = splits.from_raw(splits_json['splitChange1_2']['ff']['d'][0]) + split_name = splits_json['splitChange1_2']['ff']['d'][0]['name'] await self.mock_adapter.set(pluggable_split_storage._prefix.format(feature_flag_name=split_name), split1.to_json()) split = await pluggable_split_storage.get(split_name) - assert(split.to_json() == splits.from_raw(splits_json['splitChange1_2']['splits'][0]).to_json()) + assert(split.to_json() == splits.from_raw(splits_json['splitChange1_2']['ff']['d'][0]).to_json()) assert(await pluggable_split_storage.get('not_existing') == None) @pytest.mark.asyncio @@ -424,8 +424,8 @@ async def test_fetch_many(self): self.mock_adapter._keys = {} for sprefix in [None, 'myprefix']: pluggable_split_storage = PluggableSplitStorageAsync(self.mock_adapter, prefix=sprefix) - split1 = splits.from_raw(splits_json['splitChange1_2']['splits'][0]) - split2_temp = splits_json['splitChange1_2']['splits'][0].copy() + split1 = splits.from_raw(splits_json['splitChange1_2']['ff']['d'][0]) + split2_temp = splits_json['splitChange1_2']['ff']['d'][0].copy() split2_temp['name'] = 'another_split' split2 = splits.from_raw(split2_temp) @@ -452,8 +452,8 @@ async def test_get_split_names(self): 
self.mock_adapter._keys = {} for sprefix in [None, 'myprefix']: pluggable_split_storage = PluggableSplitStorageAsync(self.mock_adapter, prefix=sprefix) - split1 = splits.from_raw(splits_json['splitChange1_2']['splits'][0]) - split2_temp = splits_json['splitChange1_2']['splits'][0].copy() + split1 = splits.from_raw(splits_json['splitChange1_2']['ff']['d'][0]) + split2_temp = splits_json['splitChange1_2']['ff']['d'][0].copy() split2_temp['name'] = 'another_split' split2 = splits.from_raw(split2_temp) await self.mock_adapter.set(pluggable_split_storage._prefix.format(feature_flag_name=split1.name), split1.to_json()) @@ -1386,11 +1386,11 @@ def test_get(self): for sprefix in [None, 'myprefix']: pluggable_rbs_storage = PluggableRuleBasedSegmentsStorage(self.mock_adapter, prefix=sprefix) - rbs1 = rule_based_segments.from_raw(rbsegments_json['segment1']) - rbs_name = rbsegments_json['segment1']['name'] + rbs1 = rule_based_segments.from_raw(rbsegments_json[0]['segment1']) + rbs_name = rbsegments_json[0]['segment1']['name'] self.mock_adapter.set(pluggable_rbs_storage._prefix.format(segment_name=rbs_name), rbs1.to_json()) - assert(pluggable_rbs_storage.get(rbs_name).to_json() == rule_based_segments.from_raw(rbsegments_json['segment1']).to_json()) + assert(pluggable_rbs_storage.get(rbs_name).to_json() == rule_based_segments.from_raw(rbsegments_json[0]['segment1']).to_json()) assert(pluggable_rbs_storage.get('not_existing') == None) def test_get_change_number(self): @@ -1408,8 +1408,8 @@ def test_get_segment_names(self): self.mock_adapter._keys = {} for sprefix in [None, 'myprefix']: pluggable_rbs_storage = PluggableRuleBasedSegmentsStorage(self.mock_adapter, prefix=sprefix) - rbs1 = rule_based_segments.from_raw(rbsegments_json['segment1']) - rbs2_temp = copy.deepcopy(rbsegments_json['segment1']) + rbs1 = rule_based_segments.from_raw(rbsegments_json[0]['segment1']) + rbs2_temp = copy.deepcopy(rbsegments_json[0]['segment1']) rbs2_temp['name'] = 'another_segment' rbs2 = 
rule_based_segments.from_raw(rbs2_temp) self.mock_adapter.set(pluggable_rbs_storage._prefix.format(segment_name=rbs1.name), rbs1.to_json()) @@ -1420,8 +1420,8 @@ def test_contains(self): self.mock_adapter._keys = {} for sprefix in [None, 'myprefix']: pluggable_rbs_storage = PluggableRuleBasedSegmentsStorage(self.mock_adapter, prefix=sprefix) - rbs1 = rule_based_segments.from_raw(rbsegments_json['segment1']) - rbs2_temp = copy.deepcopy(rbsegments_json['segment1']) + rbs1 = rule_based_segments.from_raw(rbsegments_json[0]['segment1']) + rbs2_temp = copy.deepcopy(rbsegments_json[0]['segment1']) rbs2_temp['name'] = 'another_segment' rbs2 = rule_based_segments.from_raw(rbs2_temp) self.mock_adapter.set(pluggable_rbs_storage._prefix.format(segment_name=rbs1.name), rbs1.to_json()) @@ -1445,12 +1445,12 @@ async def test_get(self): for sprefix in [None, 'myprefix']: pluggable_rbs_storage = PluggableRuleBasedSegmentsStorageAsync(self.mock_adapter, prefix=sprefix) - rbs1 = rule_based_segments.from_raw(rbsegments_json['segment1']) - rbs_name = rbsegments_json['segment1']['name'] + rbs1 = rule_based_segments.from_raw(rbsegments_json[0]['segment1']) + rbs_name = rbsegments_json[0]['segment1']['name'] await self.mock_adapter.set(pluggable_rbs_storage._prefix.format(segment_name=rbs_name), rbs1.to_json()) rbs = await pluggable_rbs_storage.get(rbs_name) - assert(rbs.to_json() == rule_based_segments.from_raw(rbsegments_json['segment1']).to_json()) + assert(rbs.to_json() == rule_based_segments.from_raw(rbsegments_json[0]['segment1']).to_json()) assert(await pluggable_rbs_storage.get('not_existing') == None) @pytest.mark.asyncio @@ -1470,8 +1470,8 @@ async def test_get_segment_names(self): self.mock_adapter._keys = {} for sprefix in [None, 'myprefix']: pluggable_rbs_storage = PluggableRuleBasedSegmentsStorageAsync(self.mock_adapter, prefix=sprefix) - rbs1 = rule_based_segments.from_raw(rbsegments_json['segment1']) - rbs2_temp = copy.deepcopy(rbsegments_json['segment1']) + rbs1 = 
rule_based_segments.from_raw(rbsegments_json[0]['segment1']) + rbs2_temp = copy.deepcopy(rbsegments_json[0]['segment1']) rbs2_temp['name'] = 'another_segment' rbs2 = rule_based_segments.from_raw(rbs2_temp) await self.mock_adapter.set(pluggable_rbs_storage._prefix.format(segment_name=rbs1.name), rbs1.to_json()) @@ -1483,8 +1483,8 @@ async def test_contains(self): self.mock_adapter._keys = {} for sprefix in [None, 'myprefix']: pluggable_rbs_storage = PluggableRuleBasedSegmentsStorageAsync(self.mock_adapter, prefix=sprefix) - rbs1 = rule_based_segments.from_raw(rbsegments_json['segment1']) - rbs2_temp = copy.deepcopy(rbsegments_json['segment1']) + rbs1 = rule_based_segments.from_raw(rbsegments_json[0]['segment1']) + rbs2_temp = copy.deepcopy(rbsegments_json[0]['segment1']) rbs2_temp['name'] = 'another_segment' rbs2 = rule_based_segments.from_raw(rbs2_temp) await self.mock_adapter.set(pluggable_rbs_storage._prefix.format(segment_name=rbs1.name), rbs1.to_json()) diff --git a/tests/sync/test_manager.py b/tests/sync/test_manager.py index b99c63a8..47ac3f01 100644 --- a/tests/sync/test_manager.py +++ b/tests/sync/test_manager.py @@ -24,7 +24,7 @@ from splitio.sync.event import EventSynchronizer from splitio.sync.synchronizer import Synchronizer, SynchronizerAsync, SplitTasks, SplitSynchronizers, RedisSynchronizer, RedisSynchronizerAsync from splitio.sync.manager import Manager, ManagerAsync, RedisManager, RedisManagerAsync -from splitio.storage import SplitStorage +from splitio.storage import SplitStorage, RuleBasedSegmentsStorage from splitio.api import APIException from splitio.client.util import SdkMetadata @@ -38,6 +38,7 @@ def test_error(self, mocker): mocker.Mock(), mocker.Mock()) storage = mocker.Mock(spec=SplitStorage) + rb_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) api = mocker.Mock() def run(x): @@ -46,7 +47,7 @@ def run(x): api.fetch_splits.side_effect = run storage.get_change_number.return_value = -1 - split_sync = SplitSynchronizer(api, storage) + 
split_sync = SplitSynchronizer(api, storage, rb_storage) synchronizers = SplitSynchronizers(split_sync, mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock()) @@ -102,6 +103,7 @@ async def test_error(self, mocker): mocker.Mock(), mocker.Mock()) storage = mocker.Mock(spec=SplitStorage) + rb_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) api = mocker.Mock() async def run(x): @@ -112,7 +114,7 @@ async def get_change_number(): return -1 storage.get_change_number = get_change_number - split_sync = SplitSynchronizerAsync(api, storage) + split_sync = SplitSynchronizerAsync(api, storage, rb_storage) synchronizers = SplitSynchronizers(split_sync, mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock()) diff --git a/tests/sync/test_segments_synchronizer.py b/tests/sync/test_segments_synchronizer.py index 6e8f7f78..5a6ef849 100644 --- a/tests/sync/test_segments_synchronizer.py +++ b/tests/sync/test_segments_synchronizer.py @@ -84,12 +84,12 @@ def fetch_segment_mock(segment_name, change_number, fetch_options): assert segments_synchronizer.synchronize_segments() api_calls = [call for call in api.fetch_segment.mock_calls] - assert mocker.call('segmentA', -1, FetchOptions(True, None, None, None)) in api_calls - assert mocker.call('segmentB', -1, FetchOptions(True, None, None, None)) in api_calls - assert mocker.call('segmentC', -1, FetchOptions(True, None, None, None)) in api_calls - assert mocker.call('segmentA', 123, FetchOptions(True, None, None, None)) in api_calls - assert mocker.call('segmentB', 123, FetchOptions(True, None, None, None)) in api_calls - assert mocker.call('segmentC', 123, FetchOptions(True, None, None, None)) in api_calls + assert mocker.call('segmentA', -1, FetchOptions(True, None, None, None, None)) in api_calls + assert mocker.call('segmentB', -1, FetchOptions(True, None, None, None, None)) in api_calls + assert mocker.call('segmentC', -1, FetchOptions(True, None, None, None, None)) in api_calls + assert 
mocker.call('segmentA', 123, FetchOptions(True, None, None, None, None)) in api_calls + assert mocker.call('segmentB', 123, FetchOptions(True, None, None, None, None)) in api_calls + assert mocker.call('segmentC', 123, FetchOptions(True, None, None, None, None)) in api_calls segment_put_calls = storage.put.mock_calls segments_to_validate = set(['segmentA', 'segmentB', 'segmentC']) @@ -128,8 +128,8 @@ def fetch_segment_mock(segment_name, change_number, fetch_options): segments_synchronizer.synchronize_segment('segmentA') api_calls = [call for call in api.fetch_segment.mock_calls] - assert mocker.call('segmentA', -1, FetchOptions(True, None, None, None)) in api_calls - assert mocker.call('segmentA', 123, FetchOptions(True, None, None, None)) in api_calls + assert mocker.call('segmentA', -1, FetchOptions(True, None, None, None, None)) in api_calls + assert mocker.call('segmentA', 123, FetchOptions(True, None, None, None, None)) in api_calls def test_synchronize_segment_cdn(self, mocker): """Test particular segment update cdn bypass.""" @@ -173,12 +173,12 @@ def fetch_segment_mock(segment_name, change_number, fetch_options): segments_synchronizer = SegmentSynchronizer(api, split_storage, storage) segments_synchronizer.synchronize_segment('segmentA') - assert mocker.call('segmentA', -1, FetchOptions(True, None, None, None)) in api.fetch_segment.mock_calls - assert mocker.call('segmentA', 123, FetchOptions(True, None, None, None)) in api.fetch_segment.mock_calls + assert mocker.call('segmentA', -1, FetchOptions(True, None, None, None, None)) in api.fetch_segment.mock_calls + assert mocker.call('segmentA', 123, FetchOptions(True, None, None, None, None)) in api.fetch_segment.mock_calls segments_synchronizer._backoff = Backoff(1, 0.1) segments_synchronizer.synchronize_segment('segmentA', 12345) - assert mocker.call('segmentA', 12345, FetchOptions(True, 1234, None, None)) in api.fetch_segment.mock_calls + assert mocker.call('segmentA', 12345, FetchOptions(True, 1234, None, 
None, None)) in api.fetch_segment.mock_calls assert len(api.fetch_segment.mock_calls) == 8 # 2 ok + BACKOFF(2 since==till + 2 re-attempts) + CDN(2 since==till) def test_recreate(self, mocker): @@ -287,12 +287,12 @@ async def fetch_segment_mock(segment_name, change_number, fetch_options): segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage) assert await segments_synchronizer.synchronize_segments() - assert (self.segment[0], self.change[0], self.options[0]) == ('segmentA', -1, FetchOptions(True, None, None, None)) - assert (self.segment[1], self.change[1], self.options[1]) == ('segmentA', 123, FetchOptions(True, None, None, None)) - assert (self.segment[2], self.change[2], self.options[2]) == ('segmentB', -1, FetchOptions(True, None, None, None)) - assert (self.segment[3], self.change[3], self.options[3]) == ('segmentB', 123, FetchOptions(True, None, None, None)) - assert (self.segment[4], self.change[4], self.options[4]) == ('segmentC', -1, FetchOptions(True, None, None, None)) - assert (self.segment[5], self.change[5], self.options[5]) == ('segmentC', 123, FetchOptions(True, None, None, None)) + assert (self.segment[0], self.change[0], self.options[0]) == ('segmentA', -1, FetchOptions(True, None, None, None, None)) + assert (self.segment[1], self.change[1], self.options[1]) == ('segmentA', 123, FetchOptions(True, None, None, None, None)) + assert (self.segment[2], self.change[2], self.options[2]) == ('segmentB', -1, FetchOptions(True, None, None, None, None)) + assert (self.segment[3], self.change[3], self.options[3]) == ('segmentB', 123, FetchOptions(True, None, None, None, None)) + assert (self.segment[4], self.change[4], self.options[4]) == ('segmentC', -1, FetchOptions(True, None, None, None, None)) + assert (self.segment[5], self.change[5], self.options[5]) == ('segmentC', 123, FetchOptions(True, None, None, None, None)) segments_to_validate = set(['segmentA', 'segmentB', 'segmentC']) for segment in self.segment_put: @@ -343,8 +343,8 
@@ async def fetch_segment_mock(segment_name, change_number, fetch_options): segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage) await segments_synchronizer.synchronize_segment('segmentA') - assert (self.segment[0], self.change[0], self.options[0]) == ('segmentA', -1, FetchOptions(True, None, None, None)) - assert (self.segment[1], self.change[1], self.options[1]) == ('segmentA', 123, FetchOptions(True, None, None, None)) + assert (self.segment[0], self.change[0], self.options[0]) == ('segmentA', -1, FetchOptions(True, None, None, None, None)) + assert (self.segment[1], self.change[1], self.options[1]) == ('segmentA', 123, FetchOptions(True, None, None, None, None)) await segments_synchronizer.shutdown() @@ -403,12 +403,12 @@ async def fetch_segment_mock(segment_name, change_number, fetch_options): segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage) await segments_synchronizer.synchronize_segment('segmentA') - assert (self.segment[0], self.change[0], self.options[0]) == ('segmentA', -1, FetchOptions(True, None, None, None)) - assert (self.segment[1], self.change[1], self.options[1]) == ('segmentA', 123, FetchOptions(True, None, None, None)) + assert (self.segment[0], self.change[0], self.options[0]) == ('segmentA', -1, FetchOptions(True, None, None, None, None)) + assert (self.segment[1], self.change[1], self.options[1]) == ('segmentA', 123, FetchOptions(True, None, None, None, None)) segments_synchronizer._backoff = Backoff(1, 0.1) await segments_synchronizer.synchronize_segment('segmentA', 12345) - assert (self.segment[7], self.change[7], self.options[7]) == ('segmentA', 12345, FetchOptions(True, 1234, None, None)) + assert (self.segment[7], self.change[7], self.options[7]) == ('segmentA', 12345, FetchOptions(True, 1234, None, None, None)) assert len(self.segment) == 8 # 2 ok + BACKOFF(2 since==till + 2 re-attempts) + CDN(2 since==till) await segments_synchronizer.shutdown() diff --git 
a/tests/sync/test_splits_synchronizer.py b/tests/sync/test_splits_synchronizer.py index ce1ade7e..3afb1f0d 100644 --- a/tests/sync/test_splits_synchronizer.py +++ b/tests/sync/test_splits_synchronizer.py @@ -1072,95 +1072,95 @@ def test_elements_sanitization(self, mocker): split_synchronizer = LocalSplitSynchronizer(mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock()) # No changes when split structure is good - assert (split_synchronizer._sanitize_feature_flag_elements(splits_json["splitChange1_1"]["splits"]) == splits_json["splitChange1_1"]["splits"]) + assert (split_synchronizer._sanitize_feature_flag_elements(splits_json["splitChange1_1"]['ff']['d']) == splits_json["splitChange1_1"]['ff']['d']) # test 'trafficTypeName' value None - split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['trafficTypeName'] = None - assert (split_synchronizer._sanitize_feature_flag_elements(split) == splits_json["splitChange1_1"]["splits"]) + assert (split_synchronizer._sanitize_feature_flag_elements(split) == splits_json["splitChange1_1"]['ff']['d']) # test 'trafficAllocation' value None - split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['trafficAllocation'] = None - assert (split_synchronizer._sanitize_feature_flag_elements(split) == splits_json["splitChange1_1"]["splits"]) + assert (split_synchronizer._sanitize_feature_flag_elements(split) == splits_json["splitChange1_1"]['ff']['d']) # test 'trafficAllocation' valid value should not change - split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['trafficAllocation'] = 50 assert (split_synchronizer._sanitize_feature_flag_elements(split) == split) # test 'trafficAllocation' invalid value should change - split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() 
split[0]['trafficAllocation'] = 110 - assert (split_synchronizer._sanitize_feature_flag_elements(split) == splits_json["splitChange1_1"]["splits"]) + assert (split_synchronizer._sanitize_feature_flag_elements(split) == splits_json["splitChange1_1"]['ff']['d']) # test 'trafficAllocationSeed' is set to millisec epoch when None - split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['trafficAllocationSeed'] = None assert (split_synchronizer._sanitize_feature_flag_elements(split)[0]['trafficAllocationSeed'] > 0) # test 'trafficAllocationSeed' is set to millisec epoch when 0 - split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['trafficAllocationSeed'] = 0 assert (split_synchronizer._sanitize_feature_flag_elements(split)[0]['trafficAllocationSeed'] > 0) # test 'seed' is set to millisec epoch when None - split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['seed'] = None assert (split_synchronizer._sanitize_feature_flag_elements(split)[0]['seed'] > 0) # test 'seed' is set to millisec epoch when its 0 - split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['seed'] = 0 assert (split_synchronizer._sanitize_feature_flag_elements(split)[0]['seed'] > 0) # test 'status' is set to ACTIVE when None - split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['status'] = None - assert (split_synchronizer._sanitize_feature_flag_elements(split) == splits_json["splitChange1_1"]["splits"]) + assert (split_synchronizer._sanitize_feature_flag_elements(split) == splits_json["splitChange1_1"]['ff']['d']) # test 'status' is set to ACTIVE when incorrect - split = splits_json["splitChange1_1"]["splits"].copy() + split = 
splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['status'] = 'ww' - assert (split_synchronizer._sanitize_feature_flag_elements(split) == splits_json["splitChange1_1"]["splits"]) + assert (split_synchronizer._sanitize_feature_flag_elements(split) == splits_json["splitChange1_1"]['ff']['d']) # test ''killed' is set to False when incorrect - split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['killed'] = None - assert (split_synchronizer._sanitize_feature_flag_elements(split) == splits_json["splitChange1_1"]["splits"]) + assert (split_synchronizer._sanitize_feature_flag_elements(split) == splits_json["splitChange1_1"]['ff']['d']) # test 'defaultTreatment' is set to on when None - split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['defaultTreatment'] = None assert (split_synchronizer._sanitize_feature_flag_elements(split)[0]['defaultTreatment'] == 'control') # test 'defaultTreatment' is set to on when its empty - split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['defaultTreatment'] = ' ' assert (split_synchronizer._sanitize_feature_flag_elements(split)[0]['defaultTreatment'] == 'control') # test 'changeNumber' is set to 0 when None - split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['changeNumber'] = None assert (split_synchronizer._sanitize_feature_flag_elements(split)[0]['changeNumber'] == 0) # test 'changeNumber' is set to 0 when invalid - split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['changeNumber'] = -33 assert (split_synchronizer._sanitize_feature_flag_elements(split)[0]['changeNumber'] == 0) # test 'algo' is set to 2 when None - split = splits_json["splitChange1_1"]["splits"].copy() + split = 
splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['algo'] = None assert (split_synchronizer._sanitize_feature_flag_elements(split)[0]['algo'] == 2) # test 'algo' is set to 2 when higher than 2 - split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['algo'] = 3 assert (split_synchronizer._sanitize_feature_flag_elements(split)[0]['algo'] == 2) # test 'algo' is set to 2 when lower than 2 - split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['algo'] = 1 assert (split_synchronizer._sanitize_feature_flag_elements(split)[0]['algo'] == 2) @@ -1183,29 +1183,29 @@ def test_condition_sanitization(self, mocker): split_synchronizer = LocalSplitSynchronizer(mocker.Mock(), mocker.Mock(), mocker.Mock()) # test missing all conditions with default rule set to 100% off - split = splits_json["splitChange1_1"]["splits"].copy() - target_split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() + target_split = splits_json["splitChange1_1"]['ff']['d'].copy() target_split[0]["conditions"][0]['partitions'][0]['size'] = 0 target_split[0]["conditions"][0]['partitions'][1]['size'] = 100 del split[0]["conditions"] assert (split_synchronizer._sanitize_feature_flag_elements(split) == target_split) # test missing ALL_KEYS condition matcher with default rule set to 100% off - split = splits_json["splitChange1_1"]["splits"].copy() - target_split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() + target_split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]["conditions"][0]["matcherGroup"]["matchers"][0]["matcherType"] = "IN_STR" target_split = split.copy() - target_split[0]["conditions"].append(splits_json["splitChange1_1"]["splits"][0]["conditions"][0]) + 
target_split[0]["conditions"].append(splits_json["splitChange1_1"]['ff']['d'][0]["conditions"][0]) target_split[0]["conditions"][1]['partitions'][0]['size'] = 0 target_split[0]["conditions"][1]['partitions'][1]['size'] = 100 assert (split_synchronizer._sanitize_feature_flag_elements(split) == target_split) # test missing ROLLOUT condition type with default rule set to 100% off - split = splits_json["splitChange1_1"]["splits"].copy() - target_split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() + target_split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]["conditions"][0]["conditionType"] = "NOT" target_split = split.copy() - target_split[0]["conditions"].append(splits_json["splitChange1_1"]["splits"][0]["conditions"][0]) + target_split[0]["conditions"].append(splits_json["splitChange1_1"]['ff']['d'][0]["conditions"][0]) target_split[0]["conditions"][1]['partitions'][0]['size'] = 0 target_split[0]["conditions"][1]['partitions'][1]['size'] = 100 assert (split_synchronizer._sanitize_feature_flag_elements(split) == target_split) diff --git a/tests/sync/test_synchronizer.py b/tests/sync/test_synchronizer.py index 1e89af66..42985e4c 100644 --- a/tests/sync/test_synchronizer.py +++ b/tests/sync/test_synchronizer.py @@ -12,11 +12,12 @@ from splitio.sync.segment import SegmentSynchronizer, SegmentSynchronizerAsync, LocalSegmentSynchronizer, LocalSegmentSynchronizerAsync from splitio.sync.impression import ImpressionSynchronizer, ImpressionSynchronizerAsync, ImpressionsCountSynchronizer, ImpressionsCountSynchronizerAsync from splitio.sync.event import EventSynchronizer, EventSynchronizerAsync -from splitio.storage import SegmentStorage, SplitStorage +from splitio.storage import SegmentStorage, SplitStorage, RuleBasedSegmentsStorage from splitio.api import APIException, APIUriException from splitio.models.splits import Split from splitio.models.segments import Segment -from splitio.storage.inmemmory 
import InMemorySegmentStorage, InMemorySplitStorage, InMemorySegmentStorageAsync, InMemorySplitStorageAsync +from splitio.storage.inmemmory import InMemorySegmentStorage, InMemorySplitStorage, InMemorySegmentStorageAsync, InMemorySplitStorageAsync, \ + InMemoryRuleBasedSegmentStorage, InMemoryRuleBasedSegmentStorageAsync splits = [{ 'changeNumber': 123, @@ -61,11 +62,11 @@ def intersect(sets): storage.flag_set_filter.flag_sets = {} storage.flag_set_filter.sorted_flag_sets = [] - def run(x, c): + def run(x, y, c): raise APIException("something broke") api.fetch_splits.side_effect = run - split_sync = SplitSynchronizer(api, storage) + split_sync = SplitSynchronizer(api, storage, mocker.Mock()) split_synchronizers = SplitSynchronizers(split_sync, mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock()) sychronizer = Synchronizer(split_synchronizers, mocker.Mock(spec=SplitTasks)) @@ -87,11 +88,11 @@ def intersect(sets): storage.flag_set_filter.flag_sets = {} storage.flag_set_filter.sorted_flag_sets = [] - def run(x, c): + def run(x, y, c): raise APIException("something broke", 414) api.fetch_splits.side_effect = run - split_sync = SplitSynchronizer(api, storage) + split_sync = SplitSynchronizer(api, storage, mocker.Mock()) split_synchronizers = SplitSynchronizers(split_sync, mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock()) synchronizer = Synchronizer(split_synchronizers, mocker.Mock(spec=SplitTasks)) @@ -108,7 +109,7 @@ def test_sync_all_failed_segments(self, mocker): split_sync = mocker.Mock(spec=SplitSynchronizer) split_sync.synchronize_splits.return_value = None - def run(x, y): + def run(x, y, c): raise APIException("something broke") api.fetch_segment.side_effect = run @@ -122,10 +123,11 @@ def run(x, y): def test_synchronize_splits(self, mocker): split_storage = InMemorySplitStorage() + rbs_storage = InMemoryRuleBasedSegmentStorage() split_api = mocker.Mock() - split_api.fetch_splits.return_value = {'splits': splits, 'since': 123, - 'till': 123} - 
split_sync = SplitSynchronizer(split_api, split_storage) + split_api.fetch_splits.return_value = {'ff': {'d': splits, 's': 123, + 't': 123}, 'rbs': {'d': [], 's': -1, 't': -1}} + split_sync = SplitSynchronizer(split_api, split_storage, rbs_storage) segment_storage = InMemorySegmentStorage() segment_api = mocker.Mock() segment_api.fetch_segment.return_value = {'name': 'segmentA', 'added': ['key1', 'key2', @@ -148,10 +150,12 @@ def test_synchronize_splits(self, mocker): def test_synchronize_splits_calling_segment_sync_once(self, mocker): split_storage = InMemorySplitStorage() + rbs_storage = InMemoryRuleBasedSegmentStorage() split_api = mocker.Mock() - split_api.fetch_splits.return_value = {'splits': splits, 'since': 123, - 'till': 123} - split_sync = SplitSynchronizer(split_api, split_storage) + split_api.fetch_splits.return_value = {'ff': {'d': splits, 's': 123, + 't': 123}, 'rbs': {'d': [], 's': -1, 't': -1}} + + split_sync = SplitSynchronizer(split_api, split_storage, rbs_storage) counts = {'segments': 0} def sync_segments(*_): @@ -171,6 +175,7 @@ def sync_segments(*_): def test_sync_all(self, mocker): split_storage = mocker.Mock(spec=SplitStorage) + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) split_storage.get_change_number.return_value = 123 split_storage.get_segment_names.return_value = ['segmentA'] class flag_set_filter(): @@ -183,9 +188,9 @@ def intersect(sets): split_storage.flag_set_filter.sorted_flag_sets = [] split_api = mocker.Mock() - split_api.fetch_splits.return_value = {'splits': splits, 'since': 123, - 'till': 123} - split_sync = SplitSynchronizer(split_api, split_storage) + split_api.fetch_splits.return_value = {'ff': {'d': splits, 's': 123, + 't': 123}, 'rbs': {'d': [], 's': -1, 't': -1}} + split_sync = SplitSynchronizer(split_api, split_storage, rbs_storage) segment_storage = mocker.Mock(spec=SegmentStorage) segment_storage.get_change_number.return_value = 123 @@ -389,6 +394,7 @@ class SynchronizerAsyncTests(object): async def 
test_sync_all_failed_splits(self, mocker): api = mocker.Mock() storage = mocker.Mock() + rbs_storage = mocker.Mock() class flag_set_filter(): def should_filter(): return False @@ -398,15 +404,16 @@ def intersect(sets): storage.flag_set_filter.flag_sets = {} storage.flag_set_filter.sorted_flag_sets = [] - async def run(x, c): + async def run(x, y, c): raise APIException("something broke") api.fetch_splits = run async def get_change_number(): return 1234 storage.get_change_number = get_change_number + rbs_storage.get_change_number = get_change_number - split_sync = SplitSynchronizerAsync(api, storage) + split_sync = SplitSynchronizerAsync(api, storage, rbs_storage) split_synchronizers = SplitSynchronizers(split_sync, mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock()) sychronizer = SynchronizerAsync(split_synchronizers, mocker.Mock(spec=SplitTasks)) @@ -420,6 +427,7 @@ async def get_change_number(): async def test_sync_all_failed_splits_with_flagsets(self, mocker): api = mocker.Mock() storage = mocker.Mock() + rbs_storage = mocker.Mock() class flag_set_filter(): def should_filter(): return False @@ -432,12 +440,13 @@ def intersect(sets): async def get_change_number(): pass storage.get_change_number = get_change_number - - async def run(x, c): + rbs_storage.get_change_number = get_change_number + + async def run(x, y, c): raise APIException("something broke", 414) api.fetch_splits = run - split_sync = SplitSynchronizerAsync(api, storage) + split_sync = SplitSynchronizerAsync(api, storage, rbs_storage) split_synchronizers = SplitSynchronizers(split_sync, mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock()) synchronizer = SynchronizerAsync(split_synchronizers, mocker.Mock(spec=SplitTasks)) @@ -457,7 +466,7 @@ async def test_sync_all_failed_segments(self, mocker): split_sync = mocker.Mock(spec=SplitSynchronizer) split_sync.synchronize_splits.return_value = None - async def run(x, y): + async def run(x, y, c): raise APIException("something broke") 
api.fetch_segment = run @@ -477,14 +486,16 @@ async def get_segment_names(): @pytest.mark.asyncio async def test_synchronize_splits(self, mocker): split_storage = InMemorySplitStorageAsync() + rbs_storage = InMemoryRuleBasedSegmentStorageAsync() split_api = mocker.Mock() - async def fetch_splits(change, options): - return {'splits': splits, 'since': 123, - 'till': 123} + async def fetch_splits(change, rb, options): + return {'ff': {'d': splits, 's': 123, + 't': 123}, 'rbs': {'d': [], 's': -1, 't': -1}} + split_api.fetch_splits = fetch_splits - split_sync = SplitSynchronizerAsync(split_api, split_storage) + split_sync = SplitSynchronizerAsync(split_api, split_storage, rbs_storage) segment_storage = InMemorySegmentStorageAsync() segment_api = mocker.Mock() @@ -518,17 +529,18 @@ async def fetch_segment(segment_name, change, options): @pytest.mark.asyncio async def test_synchronize_splits_calling_segment_sync_once(self, mocker): split_storage = InMemorySplitStorageAsync() + rbs_storage = InMemoryRuleBasedSegmentStorageAsync() async def get_change_number(): return 123 split_storage.get_change_number = get_change_number split_api = mocker.Mock() - async def fetch_splits(change, options): - return {'splits': splits, 'since': 123, - 'till': 123} + async def fetch_splits(change, rb, options): + return {'ff': {'d': splits, 's': 123, + 't': 123}, 'rbs': {'d': [], 's': -1, 't': -1}} split_api.fetch_splits = fetch_splits - split_sync = SplitSynchronizerAsync(split_api, split_storage) + split_sync = SplitSynchronizerAsync(split_api, split_storage, rbs_storage) counts = {'segments': 0} segment_sync = mocker.Mock() @@ -552,6 +564,7 @@ async def segment_exist_in_storage(segment): @pytest.mark.asyncio async def test_sync_all(self, mocker): split_storage = InMemorySplitStorageAsync() + rbs_storage = InMemoryRuleBasedSegmentStorageAsync() async def get_change_number(): return 123 split_storage.get_change_number = get_change_number @@ -576,11 +589,12 @@ def intersect(sets): 
split_storage.flag_set_filter.sorted_flag_sets = [] split_api = mocker.Mock() - async def fetch_splits(change, options): - return {'splits': splits, 'since': 123, 'till': 123} + async def fetch_splits(change, rb, options): + return {'ff': {'d': splits, 's': 123, + 't': 123}, 'rbs': {'d': [], 's': -1, 't': -1}} split_api.fetch_splits = fetch_splits - split_sync = SplitSynchronizerAsync(split_api, split_storage) + split_sync = SplitSynchronizerAsync(split_api, split_storage, rbs_storage) segment_storage = InMemorySegmentStorageAsync() async def get_change_number(segment): return 123 diff --git a/tests/sync/test_telemetry.py b/tests/sync/test_telemetry.py index c3aaac52..898216f8 100644 --- a/tests/sync/test_telemetry.py +++ b/tests/sync/test_telemetry.py @@ -169,7 +169,7 @@ def record_stats(*args, **kwargs): "spC": 1, "seC": 1, "skC": 0, - "ufs": {"sp": 3}, + "ufs": {"rbs": 0, "sp": 3}, "t": ['tag1'] }) @@ -294,6 +294,6 @@ async def record_stats(*args, **kwargs): "spC": 1, "seC": 1, "skC": 0, - "ufs": {"sp": 3}, + "ufs": {"rbs": 0, "sp": 3}, "t": ['tag1'] }) diff --git a/tests/tasks/test_segment_sync.py b/tests/tasks/test_segment_sync.py index 930d3f86..d5640709 100644 --- a/tests/tasks/test_segment_sync.py +++ b/tests/tasks/test_segment_sync.py @@ -62,7 +62,7 @@ def fetch_segment_mock(segment_name, change_number, fetch_options): fetch_segment_mock._count_c = 0 api = mocker.Mock() - fetch_options = FetchOptions(True, None, None, None) + fetch_options = FetchOptions(True, None, None, None, None) api.fetch_segment.side_effect = fetch_segment_mock segments_synchronizer = SegmentSynchronizer(api, split_storage, storage) @@ -139,7 +139,7 @@ def fetch_segment_mock(segment_name, change_number, fetch_options): fetch_segment_mock._count_c = 0 api = mocker.Mock() - fetch_options = FetchOptions(True, None, None, None) + fetch_options = FetchOptions(True, None, None, None, None) api.fetch_segment.side_effect = fetch_segment_mock segments_synchronizer = SegmentSynchronizer(api, 
split_storage, storage) @@ -238,7 +238,7 @@ async def fetch_segment_mock(segment_name, change_number, fetch_options): fetch_segment_mock._count_c = 0 api = mocker.Mock() - fetch_options = FetchOptions(True, None, None, None) + fetch_options = FetchOptions(True, None, None, None, None) api.fetch_segment = fetch_segment_mock segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage) @@ -326,7 +326,7 @@ async def fetch_segment_mock(segment_name, change_number, fetch_options): fetch_segment_mock._count_c = 0 api = mocker.Mock() - fetch_options = FetchOptions(True, None, None, None) + fetch_options = FetchOptions(True, None, None, None, None) api.fetch_segment = fetch_segment_mock segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage) diff --git a/tests/tasks/test_split_sync.py b/tests/tasks/test_split_sync.py index 9e9267e5..c1ec3620 100644 --- a/tests/tasks/test_split_sync.py +++ b/tests/tasks/test_split_sync.py @@ -6,7 +6,7 @@ from splitio.api import APIException from splitio.api.commons import FetchOptions from splitio.tasks import split_sync -from splitio.storage import SplitStorage +from splitio.storage import SplitStorage, RuleBasedSegmentsStorage from splitio.models.splits import Split from splitio.sync.split import SplitSynchronizer, SplitSynchronizerAsync from splitio.optional.loaders import asyncio @@ -53,6 +53,7 @@ class SplitSynchronizationTests(object): def test_normal_operation(self, mocker): """Test the normal operation flow.""" storage = mocker.Mock(spec=SplitStorage) + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) def change_number_mock(): change_number_mock._calls += 1 @@ -78,22 +79,19 @@ def get_changes(*args, **kwargs): get_changes.called += 1 if get_changes.called == 1: - return { - 'splits': splits, - 'since': -1, - 'till': 123 + return {'ff': { + 'd': splits, + 's': -1, + 't': 123}, 'rbs': {'d': [], 't': -1, 's': -1} } else: - return { - 'splits': [], - 'since': 123, - 'till': 123 - } + return 
{'ff': {'d': [],'s': 123, 't': 123}, + 'rbs': {'d': [], 't': -1, 's': -1}} get_changes.called = 0 fetch_options = FetchOptions(True) api.fetch_splits.side_effect = get_changes - split_synchronizer = SplitSynchronizer(api, storage) + split_synchronizer = SplitSynchronizer(api, storage, rbs_storage) task = split_sync.SplitSynchronizationTask(split_synchronizer.synchronize_splits, 0.5) task.start() time.sleep(0.7) @@ -103,9 +101,9 @@ def get_changes(*args, **kwargs): stop_event.wait() assert not task.is_running() assert api.fetch_splits.mock_calls[0][1][0] == -1 - assert api.fetch_splits.mock_calls[0][1][1].cache_control_headers == True + assert api.fetch_splits.mock_calls[0][1][2].cache_control_headers == True assert api.fetch_splits.mock_calls[1][1][0] == 123 - assert api.fetch_splits.mock_calls[1][1][1].cache_control_headers == True + assert api.fetch_splits.mock_calls[1][1][2].cache_control_headers == True inserted_split = storage.update.mock_calls[0][1][0][0] assert isinstance(inserted_split, Split) @@ -114,20 +112,23 @@ def get_changes(*args, **kwargs): def test_that_errors_dont_stop_task(self, mocker): """Test that if fetching splits fails at some_point, the task will continue running.""" storage = mocker.Mock(spec=SplitStorage) + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) api = mocker.Mock() def run(x): run._calls += 1 if run._calls == 1: - return {'splits': [], 'since': -1, 'till': -1} + return {'ff': {'d': [],'s': -1, 't': -1}, + 'rbs': {'d': [], 't': -1, 's': -1}} if run._calls == 2: - return {'splits': [], 'since': -1, 'till': -1} + return {'ff': {'d': [],'s': -1, 't': -1}, + 'rbs': {'d': [], 't': -1, 's': -1}} raise APIException("something broke") run._calls = 0 api.fetch_splits.side_effect = run storage.get_change_number.return_value = -1 - split_synchronizer = SplitSynchronizer(api, storage) + split_synchronizer = SplitSynchronizer(api, storage, rbs_storage) task = split_sync.SplitSynchronizationTask(split_synchronizer.synchronize_splits, 
0.5) task.start() time.sleep(0.1) @@ -144,6 +145,7 @@ class SplitSynchronizationAsyncTests(object): async def test_normal_operation(self, mocker): """Test the normal operation flow.""" storage = mocker.Mock(spec=SplitStorage) + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) async def change_number_mock(): change_number_mock._calls += 1 @@ -152,6 +154,9 @@ async def change_number_mock(): return 123 change_number_mock._calls = 0 storage.get_change_number = change_number_mock + async def rb_change_number_mock(): + return -1 + rbs_storage.get_change_number = rb_change_number_mock class flag_set_filter(): def should_filter(): @@ -167,26 +172,20 @@ async def set_change_number(*_): pass change_number_mock._calls = 0 storage.set_change_number = set_change_number - + api = mocker.Mock() self.change_number = [] self.fetch_options = [] - async def get_changes(change_number, fetch_options): + async def get_changes(change_number, rb_change_number, fetch_options): self.change_number.append(change_number) self.fetch_options.append(fetch_options) get_changes.called += 1 if get_changes.called == 1: - return { - 'splits': splits, - 'since': -1, - 'till': 123 - } + return {'ff': {'d': splits,'s': -1, 't': 123}, + 'rbs': {'d': [], 't': -1, 's': -1}} else: - return { - 'splits': [], - 'since': 123, - 'till': 123 - } + return {'ff': {'d': [],'s': 123, 't': 123}, + 'rbs': {'d': [], 't': -1, 's': -1}} api.fetch_splits = get_changes get_changes.called = 0 self.inserted_split = None @@ -194,12 +193,15 @@ async def update(split, deleted, change_number): if len(split) > 0: self.inserted_split = split storage.update = update - + async def rbs_update(split, deleted, change_number): + pass + rbs_storage.update = rbs_update + fetch_options = FetchOptions(True) - split_synchronizer = SplitSynchronizerAsync(api, storage) + split_synchronizer = SplitSynchronizerAsync(api, storage, rbs_storage) task = split_sync.SplitSynchronizationTaskAsync(split_synchronizer.synchronize_splits, 0.5) 
task.start() - await asyncio.sleep(1) + await asyncio.sleep(2) assert task.is_running() await task.stop() assert not task.is_running() @@ -212,14 +214,17 @@ async def update(split, deleted, change_number): async def test_that_errors_dont_stop_task(self, mocker): """Test that if fetching splits fails at some_point, the task will continue running.""" storage = mocker.Mock(spec=SplitStorage) + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) api = mocker.Mock() async def run(x): run._calls += 1 if run._calls == 1: - return {'splits': [], 'since': -1, 'till': -1} + return {'ff': {'d': [],'s': -1, 't': -1}, + 'rbs': {'d': [], 't': -1, 's': -1}} if run._calls == 2: - return {'splits': [], 'since': -1, 'till': -1} + return {'ff': {'d': [],'s': -1, 't': -1}, + 'rbs': {'d': [], 't': -1, 's': -1}} raise APIException("something broke") run._calls = 0 api.fetch_splits = run @@ -228,7 +233,7 @@ async def get_change_number(): return -1 storage.get_change_number = get_change_number - split_synchronizer = SplitSynchronizerAsync(api, storage) + split_synchronizer = SplitSynchronizerAsync(api, storage, rbs_storage) task = split_sync.SplitSynchronizationTaskAsync(split_synchronizer.synchronize_splits, 0.5) task.start() await asyncio.sleep(0.1) From e070b9041ded6e524c83ca62941e97e70ac4cebd Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Wed, 19 Mar 2025 09:42:12 -0700 Subject: [PATCH 2/2] fixed tests --- tests/integration/__init__.py | 14 ++++++-------- tests/integration/test_client_e2e.py | 8 ++++---- 2 files changed, 10 insertions(+), 12 deletions(-) diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py index 124f5b37..bec5cd6f 100644 --- a/tests/integration/__init__.py +++ b/tests/integration/__init__.py @@ -1,15 +1,13 @@ import copy -rbsegments_json = [{ - "segment1": {"changeNumber": 12, "name": "some_segment", "status": "ACTIVE","trafficTypeName": "user","excluded":{"keys":[],"segments":[]},"conditions": []} -}] +rbsegments_json = 
[{"changeNumber": 12, "name": "some_segment", "status": "ACTIVE","trafficTypeName": "user","excluded":{"keys":[],"segments":[]},"conditions": []}] split11 = {"ff": {"t": 1675443569027, "s": -1, "d": [ {"trafficTypeName": "user", "name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779, "seed": -113875324, "status": "ACTIVE","killed": False, "defaultTreatment": "off", "changeNumber": 1675443569027,"algo": 2, "configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}], "sets": ["set_1"], "impressionsDisabled": False}, {"trafficTypeName": "user", "name": "SPLIT_1", "trafficAllocation": 100, "trafficAllocationSeed": -1780071202,"seed": -1442762199, "status": "ACTIVE","killed": False, "defaultTreatment": "off", "changeNumber": 1675443537882,"algo": 2, "configurations": {},"conditions": [{"conditionType": "ROLLOUT", "matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 0 },{ "treatment": "off", "size": 100 }],"label": "default rule"}], "sets": ["set_1", "set_2"]}, {"trafficTypeName": "user", "name": "SPLIT_3","trafficAllocation": 100,"trafficAllocationSeed": 1057590779, "seed": -113875324, "status": "ACTIVE","killed": False, 
"defaultTreatment": "off", "changeNumber": 1675443569027,"algo": 2, "configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}], "sets": ["set_1"], "impressionsDisabled": True} ]}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}} -split12 = {"ff": {"s": 1675443569027,"t": 167544376728, "d": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": True,"defaultTreatment": "off","changeNumber": 1675443767288,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}]}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}} +split12 = {"ff": {"s": 1675443569027,"t": 1675443767284, "d": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": True,"defaultTreatment": "off","changeNumber": 1675443767288,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": 
[{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}]}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}} split13 = {"ff": {"s": 1675443767288,"t": 1675443984594, "d": [ {"trafficTypeName": "user","name": "SPLIT_1","trafficAllocation": 100,"trafficAllocationSeed": -1780071202,"seed": -1442762199,"status": "ARCHIVED","killed": False,"defaultTreatment": "off","changeNumber": 1675443984594,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 0 },{ "treatment": "off", "size": 100 }],"label": "default rule"}]}, {"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": False,"defaultTreatment": "off","changeNumber": 1675443954220,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": 
"on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]} @@ -29,13 +27,13 @@ "splitChange1_2": split12, "splitChange1_3": split13, "splitChange2_1": {"ff": {"t": -1, "s": -1, "d": [{"name": "SPLIT_1","status": "ACTIVE","killed": False,"defaultTreatment": "off","configurations": {},"conditions": []}]}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}}, - "splitChange3_1": {"ff": {"t": -1, "s": -1, "d": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": False,"defaultTreatment": "off","changeNumber": 1675443569027,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"since": -1,"till": 1675443569027}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}}, - "splitChange3_2": {"ff": {"t": -1, "s": -1, "d": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": True,"defaultTreatment": "off","changeNumber": 1675443767288,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": 
[{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"since": 1675443569027,"till": 1675443569027}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}}, + "splitChange3_1": {"ff": {"t": -1, "s": -1, "d": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": False,"defaultTreatment": "off","changeNumber": 1675443569027,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"s": -1,"t": 1675443569027}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}}, + "splitChange3_2": {"ff": {"t": -1, "s": -1, "d": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": True,"defaultTreatment": "off","changeNumber": 1675443767288,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"s": 1675443569027,"t": 1675443569027}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}}, 
"splitChange4_1": split41, "splitChange4_2": split42, "splitChange4_3": split43, - "splitChange5_1": {"ff": {"t": -1, "s": -1, "d": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": False,"defaultTreatment": "off","changeNumber": 1675443569027,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"since": -1,"till": 1675443569027}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}}, - "splitChange5_2": {"ff": {"t": -1, "s": -1, "d": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": True,"defaultTreatment": "off","changeNumber": 1675443767288,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"since": 1675443569026,"till": 1675443569026}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}}, + "splitChange5_1": {"ff": {"t": -1, "s": -1, "d": [{"trafficTypeName": "user","name": 
"SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": False,"defaultTreatment": "off","changeNumber": 1675443569027,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"s": -1,"t": 1675443569027}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}}, + "splitChange5_2": {"ff": {"t": -1, "s": -1, "d": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": True,"defaultTreatment": "off","changeNumber": 1675443767288,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"s": 1675443569026,"t": 1675443569026}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}}, "splitChange6_1": split61, "splitChange6_2": split62, "splitChange6_3": split63, diff --git a/tests/integration/test_client_e2e.py b/tests/integration/test_client_e2e.py index c8a6a666..b1f5d836 100644 --- a/tests/integration/test_client_e2e.py +++ 
b/tests/integration/test_client_e2e.py @@ -1020,7 +1020,7 @@ def test_localhost_json_e2e(self): assert sorted(self.factory.manager().split_names()) == ["SPLIT_1", "SPLIT_2", "SPLIT_3"] assert client.get_treatment("key", "SPLIT_1", None) == 'off' - assert client.get_treatment("key", "SPLIT_2", None) == 'on' #?? + assert client.get_treatment("key", "SPLIT_2", None) == 'off' self._update_temp_file(splits_json['splitChange1_3']) self._synchronize_now() @@ -1078,7 +1078,7 @@ def test_localhost_json_e2e(self): self._synchronize_now() assert sorted(self.factory.manager().split_names()) == ["SPLIT_2", "SPLIT_3"] - assert client.get_treatment("key", "SPLIT_2", None) == 'off' #?? + assert client.get_treatment("key", "SPLIT_2", None) == 'on' # Tests 6 self.factory._storages['splits'].update([], ['SPLIT_2'], -1) @@ -2744,7 +2744,7 @@ async def test_localhost_json_e2e(self): assert sorted(await self.factory.manager().split_names()) == ["SPLIT_1", "SPLIT_2", "SPLIT_3"] assert await client.get_treatment("key", "SPLIT_1", None) == 'off' - assert await client.get_treatment("key", "SPLIT_2", None) == 'on' #?? + assert await client.get_treatment("key", "SPLIT_2", None) == 'off' self._update_temp_file(splits_json['splitChange1_3']) await self._synchronize_now() @@ -2802,7 +2802,7 @@ async def test_localhost_json_e2e(self): await self._synchronize_now() assert sorted(await self.factory.manager().split_names()) == ["SPLIT_2", "SPLIT_3"] - assert await client.get_treatment("key", "SPLIT_2", None) == 'off' #?? + assert await client.get_treatment("key", "SPLIT_2", None) == 'on' # Tests 6 await self.factory._storages['splits'].update([], ['SPLIT_2'], -1)