From 163afc83b0cf7386405867173d9d3a81b2cba51e Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Wed, 5 Mar 2025 16:01:15 -0800 Subject: [PATCH 01/56] model and memory storage --- splitio/models/grammar/condition.py | 10 +- splitio/models/grammar/matchers/__init__.py | 5 +- .../grammar/matchers/rule_based_segment.py | 48 ++++ splitio/models/rule_based_segments.py | 113 +++++++++ splitio/storage/__init__.py | 73 +++++- splitio/storage/inmemmory.py | 232 +++++++++++++++++- splitio/util/storage_helper.py | 29 ++- tests/models/test_rule_based_segments.py | 82 +++++++ tests/storage/test_inmemory_storage.py | 69 +++++- tests/sync/test_synchronizer.py | 2 - 10 files changed, 652 insertions(+), 11 deletions(-) create mode 100644 splitio/models/grammar/matchers/rule_based_segment.py create mode 100644 splitio/models/rule_based_segments.py create mode 100644 tests/models/test_rule_based_segments.py diff --git a/splitio/models/grammar/condition.py b/splitio/models/grammar/condition.py index 778c7867..79fdb928 100644 --- a/splitio/models/grammar/condition.py +++ b/splitio/models/grammar/condition.py @@ -119,10 +119,12 @@ def from_raw(raw_condition): :return: A condition object. 
:rtype: Condition """ - parsed_partitions = [ - partitions.from_raw(raw_partition) - for raw_partition in raw_condition['partitions'] - ] + parsed_partitions = [] + if raw_condition.get("partitions") is not None: + parsed_partitions = [ + partitions.from_raw(raw_partition) + for raw_partition in raw_condition['partitions'] + ] matcher_objects = [matchers.from_raw(x) for x in raw_condition['matcherGroup']['matchers']] diff --git a/splitio/models/grammar/matchers/__init__.py b/splitio/models/grammar/matchers/__init__.py index 34006e8b..def75626 100644 --- a/splitio/models/grammar/matchers/__init__.py +++ b/splitio/models/grammar/matchers/__init__.py @@ -10,6 +10,7 @@ from splitio.models.grammar.matchers.misc import BooleanMatcher, DependencyMatcher from splitio.models.grammar.matchers.semver import EqualToSemverMatcher, GreaterThanOrEqualToSemverMatcher, LessThanOrEqualToSemverMatcher, \ BetweenSemverMatcher, InListSemverMatcher +from splitio.models.grammar.matchers.rule_based_segment import RuleBasedSegmentMatcher MATCHER_TYPE_ALL_KEYS = 'ALL_KEYS' @@ -34,6 +35,7 @@ MATCHER_LESS_THAN_OR_EQUAL_TO_SEMVER = 'LESS_THAN_OR_EQUAL_TO_SEMVER' MATCHER_BETWEEN_SEMVER = 'BETWEEN_SEMVER' MATCHER_INLIST_SEMVER = 'IN_LIST_SEMVER' +MATCHER_IN_RULE_BASED_SEGMENT = 'IN_RULE_BASED_SEGMENT' _MATCHER_BUILDERS = { @@ -58,7 +60,8 @@ MATCHER_GREATER_THAN_OR_EQUAL_TO_SEMVER: GreaterThanOrEqualToSemverMatcher, MATCHER_LESS_THAN_OR_EQUAL_TO_SEMVER: LessThanOrEqualToSemverMatcher, MATCHER_BETWEEN_SEMVER: BetweenSemverMatcher, - MATCHER_INLIST_SEMVER: InListSemverMatcher + MATCHER_INLIST_SEMVER: InListSemverMatcher, + MATCHER_IN_RULE_BASED_SEGMENT: RuleBasedSegmentMatcher } def from_raw(raw_matcher): diff --git a/splitio/models/grammar/matchers/rule_based_segment.py b/splitio/models/grammar/matchers/rule_based_segment.py new file mode 100644 index 00000000..0e0aa665 --- /dev/null +++ b/splitio/models/grammar/matchers/rule_based_segment.py @@ -0,0 +1,48 @@ +"""Rule based segment matcher 
classes.""" +from splitio.models.grammar.matchers.base import Matcher + +class RuleBasedSegmentMatcher(Matcher): + + def _build(self, raw_matcher): + """ + Build an RuleBasedSegmentMatcher. + + :param raw_matcher: raw matcher as fetched from splitChanges response. + :type raw_matcher: dict + """ + self._rbs_segment_name = raw_matcher['userDefinedSegmentMatcherData']['segmentName'] + + def _match(self, key, attributes=None, context=None): + """ + Evaluate user input against a matcher and return whether the match is successful. + + :param key: User key. + :type key: str. + :param attributes: Custom user attributes. + :type attributes: dict. + :param context: Evaluation context + :type context: dict + + :returns: Wheter the match is successful. + :rtype: bool + """ + if self._rbs_segment_name == None: + return False + + # Check if rbs segment has exclusions + if context['ec'].segment_rbs_memberships.get(self._rbs_segment_name): + return False + + for parsed_condition in context['ec'].segment_rbs_conditions.get(self._rbs_segment_name): + if parsed_condition.matches(key, attributes, context): + return True + + return False + + def _add_matcher_specific_properties_to_json(self): + """Return UserDefinedSegment specific properties.""" + return { + 'userDefinedSegmentMatcherData': { + 'segmentName': self._rbs_segment_name + } + } \ No newline at end of file diff --git a/splitio/models/rule_based_segments.py b/splitio/models/rule_based_segments.py new file mode 100644 index 00000000..4ff548b2 --- /dev/null +++ b/splitio/models/rule_based_segments.py @@ -0,0 +1,113 @@ +"""RuleBasedSegment module.""" + +import logging + +from splitio.models import MatcherNotFoundException +from splitio.models.splits import _DEFAULT_CONDITIONS_TEMPLATE +from splitio.models.grammar import condition + +_LOGGER = logging.getLogger(__name__) + +class RuleBasedSegment(object): + """RuleBasedSegment object class.""" + + def __init__(self, name, traffic_yype_Name, change_number, status, conditions, 
excluded): + """ + Class constructor. + + :param name: Segment name. + :type name: str + :param traffic_yype_Name: traffic type name. + :type traffic_yype_Name: str + :param change_number: change number. + :type change_number: str + :param status: status. + :type status: str + :param conditions: List of conditions belonging to the segment. + :type conditions: List + :param excluded: excluded objects. + :type excluded: Excluded + """ + self._name = name + self._traffic_yype_Name = traffic_yype_Name + self._change_number = change_number + self._status = status + self._conditions = conditions + self._excluded = excluded + + @property + def name(self): + """Return segment name.""" + return self._name + + @property + def traffic_yype_Name(self): + """Return traffic type name.""" + return self._traffic_yype_Name + + @property + def change_number(self): + """Return change number.""" + return self._change_number + + @property + def status(self): + """Return status.""" + return self._status + + @property + def conditions(self): + """Return conditions.""" + return self._conditions + + @property + def excluded(self): + """Return excluded.""" + return self._excluded + +def from_raw(raw_rule_based_segment): + """ + Parse a Rule based segment from a JSON portion of splitChanges. + + :param raw_rule_based_segment: JSON object extracted from a splitChange's response + :type raw_rule_based_segment: dict + + :return: A parsed RuleBasedSegment object capable of performing evaluations. 
+ :rtype: RuleBasedSegment + """ + try: + conditions = [condition.from_raw(c) for c in raw_rule_based_segment['conditions']] + except MatcherNotFoundException as e: + _LOGGER.error(str(e)) + _LOGGER.debug("Using default conditions template for feature flag: %s", raw_rule_based_segment['name']) + conditions = [condition.from_raw(_DEFAULT_CONDITIONS_TEMPLATE)] + return RuleBasedSegment( + raw_rule_based_segment['name'], + raw_rule_based_segment['trafficTypeName'], + raw_rule_based_segment['changeNumber'], + raw_rule_based_segment['status'], + conditions, + Excluded(raw_rule_based_segment['excluded']['keys'], raw_rule_based_segment['excluded']['segments']) + ) + +class Excluded(object): + + def __init__(self, keys, segments): + """ + Class constructor. + + :param keys: List of excluded keys in a rule based segment. + :type keys: List + :param segments: List of excluded segments in a rule based segment. + :type segments: List + """ + self._keys = keys + self._segments = segments + + def get_excluded_keys(self): + """Return excluded keys.""" + return self._keys + + def get_excluded_segments(self): + """Return excluded segments""" + return self._segments diff --git a/splitio/storage/__init__.py b/splitio/storage/__init__.py index cd3bf1a0..9178398a 100644 --- a/splitio/storage/__init__.py +++ b/splitio/storage/__init__.py @@ -354,4 +354,75 @@ def intersect(self, flag_sets): if not isinstance(flag_sets, set) or len(flag_sets) == 0: return False - return any(self.flag_sets.intersection(flag_sets)) \ No newline at end of file + return any(self.flag_sets.intersection(flag_sets)) + +class RuleBasedSegmentsStorage(object, metaclass=abc.ABCMeta): + """SplitRule based segment storage interface implemented as an abstract class.""" + + @abc.abstractmethod + def get(self, segment_name): + """ + Retrieve a rule based segment. + + :param segment_name: Name of the segment to fetch. 
+ :type segment_name: str + + :rtype: str + """ + pass + + @abc.abstractmethod + def update(self, to_add, to_delete, new_change_number): + """ + Update rule based segment.. + + :param to_add: List of rule based segment. to add + :type to_add: list[splitio.models.rule_based_segments.RuleBasedSegment] + :param to_delete: List of rule based segment. to delete + :type to_delete: list[splitio.models.rule_based_segments.RuleBasedSegment] + :param new_change_number: New change number. + :type new_change_number: int + """ + pass + + @abc.abstractmethod + def get_change_number(self): + """ + Retrieve latest rule based segment change number. + + :rtype: int + """ + pass + + @abc.abstractmethod + def contains(self, segment_names): + """ + Return whether the traffic type exists in at least one rule based segment in cache. + + :param traffic_type_name: Traffic type to validate. + :type traffic_type_name: str + + :return: True if the traffic type is valid. False otherwise. + :rtype: bool + """ + pass + + @abc.abstractmethod + def get_segment_names(self): + """ + Retrieve a list of all excluded segments names. + + :return: List of segment names. + :rtype: list(str) + """ + pass + + @abc.abstractmethod + def get_large_segment_names(self): + """ + Retrieve a list of all excluded large segments names. + + :return: List of segment names. 
+ :rtype: list(str) + """ + pass \ No newline at end of file diff --git a/splitio/storage/inmemmory.py b/splitio/storage/inmemmory.py index e4cf3da3..f7af8825 100644 --- a/splitio/storage/inmemmory.py +++ b/splitio/storage/inmemmory.py @@ -7,7 +7,7 @@ from splitio.models.segments import Segment from splitio.models.telemetry import HTTPErrors, HTTPLatencies, MethodExceptions, MethodLatencies, LastSynchronization, StreamingEvents, TelemetryConfig, TelemetryCounters, CounterConstants, \ HTTPErrorsAsync, HTTPLatenciesAsync, MethodExceptionsAsync, MethodLatenciesAsync, LastSynchronizationAsync, StreamingEventsAsync, TelemetryConfigAsync, TelemetryCountersAsync -from splitio.storage import FlagSetsFilter, SplitStorage, SegmentStorage, ImpressionStorage, EventStorage, TelemetryStorage +from splitio.storage import FlagSetsFilter, SplitStorage, SegmentStorage, ImpressionStorage, EventStorage, TelemetryStorage, RuleBasedSegmentsStorage from splitio.optional.loaders import asyncio MAX_SIZE_BYTES = 5 * 1024 * 1024 @@ -107,6 +107,236 @@ def remove_flag_set(self, flag_sets, feature_flag_name, should_filter): if self.flag_set_exist(flag_set) and len(self.get_flag_set(flag_set)) == 0 and not should_filter: self._remove_flag_set(flag_set) +class InMemoryRuleBasedSegmentStorage(RuleBasedSegmentsStorage): + """InMemory implementation of a feature flag storage base.""" + def __init__(self): + """Constructor.""" + self._lock = threading.RLock() + self._rule_based_segments = {} + self._change_number = -1 + + def get(self, segment_name): + """ + Retrieve a rule based segment. + + :param segment_name: Name of the segment to fetch. + :type segment_name: str + + :rtype: splitio.models.rule_based_segments.RuleBasedSegment + """ + with self._lock: + return self._rule_based_segments.get(segment_name) + + def update(self, to_add, to_delete, new_change_number): + """ + Update rule based segment. + + :param to_add: List of rule based segment. 
to add + :type to_add: list[splitio.models.rule_based_segments.RuleBasedSegment] + :param to_delete: List of rule based segment. to delete + :type to_delete: list[splitio.models.rule_based_segments.RuleBasedSegment] + :param new_change_number: New change number. + :type new_change_number: int + """ + [self._put(add_segment) for add_segment in to_add] + [self._remove(delete_segment) for delete_segment in to_delete] + self._set_change_number(new_change_number) + + def _put(self, rule_based_segment): + """ + Store a rule based segment. + + :param rule_based_segment: RuleBasedSegment object. + :type rule_based_segment: splitio.models.rule_based_segments.RuleBasedSegment + """ + with self._lock: + self._rule_based_segments[rule_based_segment.name] = rule_based_segment + + def _remove(self, segment_name): + """ + Remove a rule based segment. + + :param segment_name: Name of the rule based segment to remove. + :type segment_name: str + + :return: True if the rule based segment was found and removed. False otherwise. + :rtype: bool + """ + with self._lock: + rule_based_segment = self._rule_based_segments.get(segment_name) + if not rule_based_segment: + _LOGGER.warning("Tried to delete nonexistant Rule based segment %s. Skipping", segment_name) + return False + + self._rule_based_segments.pop(segment_name) + return True + + def get_change_number(self): + """ + Retrieve latest rule based segment change number. + + :rtype: int + """ + with self._lock: + return self._change_number + + def _set_change_number(self, new_change_number): + """ + Set the latest change number. + + :param new_change_number: New change number. + :type new_change_number: int + """ + with self._lock: + self._change_number = new_change_number + + def get_segment_names(self): + """ + Retrieve a list of all excluded segments names. + + :return: List of segment names. 
+ :rtype: list(str) + """ + with self._lock: + return list(self._rule_based_segments.keys()) + + def get_large_segment_names(self): + """ + Retrieve a list of all excluded large segments names. + + :return: List of segment names. + :rtype: list(str) + """ + pass + + def contains(self, segment_names): + """ + Return whether the segment exists in storage + + :param segment_names: rule based segment name + :type segment_names: str + + :return: True if the segment exists. False otherwise. + :rtype: bool + """ + with self._lock: + return set(segment_names).issubset(self._rule_based_segments.keys()) + +class InMemoryRuleBasedSegmentStorageAsync(RuleBasedSegmentsStorage): + """InMemory implementation of a feature flag storage base.""" + def __init__(self): + """Constructor.""" + self._lock = asyncio.Lock() + self._rule_based_segments = {} + self._change_number = -1 + + async def get(self, segment_name): + """ + Retrieve a rule based segment. + + :param segment_name: Name of the segment to fetch. + :type segment_name: str + + :rtype: splitio.models.rule_based_segments.RuleBasedSegment + """ + async with self._lock: + return self._rule_based_segments.get(segment_name) + + async def update(self, to_add, to_delete, new_change_number): + """ + Update rule based segment. + + :param to_add: List of rule based segment. to add + :type to_add: list[splitio.models.rule_based_segments.RuleBasedSegment] + :param to_delete: List of rule based segment. to delete + :type to_delete: list[splitio.models.rule_based_segments.RuleBasedSegment] + :param new_change_number: New change number. + :type new_change_number: int + """ + [await self._put(add_segment) for add_segment in to_add] + [await self._remove(delete_segment) for delete_segment in to_delete] + await self._set_change_number(new_change_number) + + async def _put(self, rule_based_segment): + """ + Store a rule based segment. + + :param rule_based_segment: RuleBasedSegment object. 
+ :type rule_based_segment: splitio.models.rule_based_segments.RuleBasedSegment + """ + async with self._lock: + self._rule_based_segments[rule_based_segment.name] = rule_based_segment + + async def _remove(self, segment_name): + """ + Remove a rule based segment. + + :param segment_name: Name of the rule based segment to remove. + :type segment_name: str + + :return: True if the rule based segment was found and removed. False otherwise. + :rtype: bool + """ + async with self._lock: + rule_based_segment = self._rule_based_segments.get(segment_name) + if not rule_based_segment: + _LOGGER.warning("Tried to delete nonexistant Rule based segment %s. Skipping", segment_name) + return False + + self._rule_based_segments.pop(segment_name) + return True + + async def get_change_number(self): + """ + Retrieve latest rule based segment change number. + + :rtype: int + """ + async with self._lock: + return self._change_number + + async def _set_change_number(self, new_change_number): + """ + Set the latest change number. + + :param new_change_number: New change number. + :type new_change_number: int + """ + async with self._lock: + self._change_number = new_change_number + + async def get_segment_names(self): + """ + Retrieve a list of all excluded segments names. + + :return: List of segment names. + :rtype: list(str) + """ + async with self._lock: + return list(self._rule_based_segments.keys()) + + async def get_large_segment_names(self): + """ + Retrieve a list of all excluded large segments names. + + :return: List of segment names. + :rtype: list(str) + """ + pass + + async def contains(self, segment_names): + """ + Return whether the segment exists in storage + + :param segment_names: rule based segment name + :type segment_names: str + + :return: True if the segment exists. False otherwise. 
+ :rtype: bool + """ + async with self._lock: + return set(segment_names).issubset(self._rule_based_segments.keys()) + class InMemorySplitStorageBase(SplitStorage): """InMemory implementation of a feature flag storage base.""" diff --git a/splitio/util/storage_helper.py b/splitio/util/storage_helper.py index 8476cec2..b09a9f4e 100644 --- a/splitio/util/storage_helper.py +++ b/splitio/util/storage_helper.py @@ -1,6 +1,5 @@ """Storage Helper.""" import logging - from splitio.models import splits _LOGGER = logging.getLogger(__name__) @@ -33,6 +32,34 @@ def update_feature_flag_storage(feature_flag_storage, feature_flags, change_numb feature_flag_storage.update(to_add, to_delete, change_number) return segment_list +def update_rule_based_segment_storage(rule_based_segment_storage, rule_based_segments, change_number): + """ + Update rule based segment storage from given list of rule based segments + + :param rule_based_segment_storage: rule based segment storage instance + :type rule_based_segment_storage: splitio.storage.RuleBasedSegmentStorage + :param rule_based_segments: rule based segment instance to validate. 
+ :type rule_based_segments: splitio.models.rule_based_segments.RuleBasedSegment + :param: last change number + :type: int + + :return: segments list from excluded segments list + :rtype: list(str) + """ + segment_list = set() + to_add = [] + to_delete = [] + for rule_based_segment in rule_based_segments: + if rule_based_segment.status == "ACTIVE": + to_add.append(rule_based_segment) + segment_list.update(set(rule_based_segment.excluded.get_excluded_segments())) + else: + if rule_based_segment_storage.get(rule_based_segment.name) is not None: + to_delete.append(rule_based_segment.name) + + rule_based_segment_storage.update(to_add, to_delete, change_number) + return segment_list + async def update_feature_flag_storage_async(feature_flag_storage, feature_flags, change_number): """ Update feature flag storage from given list of feature flags while checking the flag set logic diff --git a/tests/models/test_rule_based_segments.py b/tests/models/test_rule_based_segments.py new file mode 100644 index 00000000..96cbdd30 --- /dev/null +++ b/tests/models/test_rule_based_segments.py @@ -0,0 +1,82 @@ +"""Split model tests module.""" +import copy + +from splitio.models import rule_based_segments +from splitio.models import splits +from splitio.models.grammar.condition import Condition + +class RuleBasedSegmentModelTests(object): + """Rule based segment model tests.""" + + raw = { + "changeNumber": 123, + "name": "sample_rule_based_segment", + "status": "ACTIVE", + "trafficTypeName": "user", + "excluded":{ + "keys":["mauro@split.io","gaston@split.io"], + "segments":[] + }, + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": "email" + }, + "matcherType": "ENDS_WITH", + "negate": False, + "whitelistMatcherData": { + "whitelist": [ + "@split.io" + ] + } + } + ] + } + } + ] + } + + def test_from_raw(self): + """Test split model parsing.""" + parsed = rule_based_segments.from_raw(self.raw) + 
assert isinstance(parsed, rule_based_segments.RuleBasedSegment) + assert parsed.change_number == 123 + assert parsed.name == 'sample_rule_based_segment' + assert parsed.status == 'ACTIVE' + assert len(parsed.conditions) == 1 + assert parsed.excluded.get_excluded_keys() == ["mauro@split.io","gaston@split.io"] + assert parsed.excluded.get_excluded_segments() == [] + conditions = parsed.conditions[0].to_json() + assert conditions['matcherGroup']['matchers'][0] == { + 'betweenMatcherData': None, 'booleanMatcherData': None, 'dependencyMatcherData': None, + 'stringMatcherData': None, 'unaryNumericMatcherData': None, 'userDefinedSegmentMatcherData': None, + "keySelector": { + "attribute": "email" + }, + "matcherType": "ENDS_WITH", + "negate": False, + "whitelistMatcherData": { + "whitelist": [ + "@split.io" + ] + } + } + + def test_incorrect_matcher(self): + """Test incorrect matcher in split model parsing.""" + rbs = copy.deepcopy(self.raw) + rbs['conditions'][0]['matcherGroup']['matchers'][0]['matcherType'] = 'INVALID_MATCHER' + rbs = rule_based_segments.from_raw(rbs) + assert rbs.conditions[0].to_json() == splits._DEFAULT_CONDITIONS_TEMPLATE + + # using multiple conditions + rbs = copy.deepcopy(self.raw) + rbs['conditions'].append(rbs['conditions'][0]) + rbs['conditions'][0]['matcherGroup']['matchers'][0]['matcherType'] = 'INVALID_MATCHER' + parsed = rule_based_segments.from_raw(rbs) + assert parsed.conditions[0].to_json() == splits._DEFAULT_CONDITIONS_TEMPLATE \ No newline at end of file diff --git a/tests/storage/test_inmemory_storage.py b/tests/storage/test_inmemory_storage.py index bf38ed57..1bf2f3de 100644 --- a/tests/storage/test_inmemory_storage.py +++ b/tests/storage/test_inmemory_storage.py @@ -11,7 +11,8 @@ from splitio.engine.telemetry import TelemetryStorageProducer, TelemetryStorageProducerAsync from splitio.storage.inmemmory import InMemorySplitStorage, InMemorySegmentStorage, InMemorySegmentStorageAsync, InMemorySplitStorageAsync, \ 
InMemoryImpressionStorage, InMemoryEventStorage, InMemoryTelemetryStorage, InMemoryImpressionStorageAsync, InMemoryEventStorageAsync, \ - InMemoryTelemetryStorageAsync, FlagSets + InMemoryTelemetryStorageAsync, FlagSets, InMemoryRuleBasedSegmentStorage, InMemoryRuleBasedSegmentStorageAsync +from splitio.models.rule_based_segments import RuleBasedSegment class FlagSetsFilterTests(object): """Flag sets filter storage tests.""" @@ -1807,3 +1808,69 @@ async def test_pop_latencies(self): assert(sync_latency == {'httpLatencies': {'split': [4] + [0] * 22, 'segment': [4] + [0] * 22, 'impression': [2] + [0] * 22, 'impressionCount': [2] + [0] * 22, 'event': [2] + [0] * 22, 'telemetry': [3] + [0] * 22, 'token': [3] + [0] * 22}}) + +class InMemoryRuleBasedSegmentStorageTests(object): + """In memory rule based segment storage test cases.""" + + def test_storing_retrieving_segments(self, mocker): + """Test storing and retrieving splits works.""" + rbs_storage = InMemoryRuleBasedSegmentStorage() + + segment1 = mocker.Mock(spec=RuleBasedSegment) + name_property = mocker.PropertyMock() + name_property.return_value = 'some_segment' + type(segment1).name = name_property + + segment2 = mocker.Mock() + name2_prop = mocker.PropertyMock() + name2_prop.return_value = 'segment2' + type(segment2).name = name2_prop + + rbs_storage.update([segment1, segment2], [], -1) + assert rbs_storage.get('some_segment') == segment1 + assert rbs_storage.get_segment_names() == ['some_segment', 'segment2'] + assert rbs_storage.get('nonexistant_segment') is None + + rbs_storage.update([], ['some_segment'], -1) + assert rbs_storage.get('some_segment') is None + + def test_store_get_changenumber(self): + """Test that storing and retrieving change numbers works.""" + storage = InMemoryRuleBasedSegmentStorage() + assert storage.get_change_number() == -1 + storage.update([], [], 5) + assert storage.get_change_number() == 5 + +class InMemoryRuleBasedSegmentStorageAsyncTests(object): + """In memory rule based 
segment storage test cases.""" + + @pytest.mark.asyncio + async def test_storing_retrieving_segments(self, mocker): + """Test storing and retrieving splits works.""" + rbs_storage = InMemoryRuleBasedSegmentStorageAsync() + + segment1 = mocker.Mock(spec=RuleBasedSegment) + name_property = mocker.PropertyMock() + name_property.return_value = 'some_segment' + type(segment1).name = name_property + + segment2 = mocker.Mock() + name2_prop = mocker.PropertyMock() + name2_prop.return_value = 'segment2' + type(segment2).name = name2_prop + + await rbs_storage.update([segment1, segment2], [], -1) + assert await rbs_storage.get('some_segment') == segment1 + assert await rbs_storage.get_segment_names() == ['some_segment', 'segment2'] + assert await rbs_storage.get('nonexistant_segment') is None + + await rbs_storage.update([], ['some_segment'], -1) + assert await rbs_storage.get('some_segment') is None + + @pytest.mark.asyncio + async def test_store_get_changenumber(self): + """Test that storing and retrieving change numbers works.""" + storage = InMemoryRuleBasedSegmentStorageAsync() + assert await storage.get_change_number() == -1 + await storage.update([], [], 5) + assert await storage.get_change_number() == 5 diff --git a/tests/sync/test_synchronizer.py b/tests/sync/test_synchronizer.py index 8e10d771..b2ef9fa0 100644 --- a/tests/sync/test_synchronizer.py +++ b/tests/sync/test_synchronizer.py @@ -1,6 +1,4 @@ """Synchronizer tests.""" - -from turtle import clear import unittest.mock as mock import pytest From a64a06efee623d87de12d5639668d839575db011 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Wed, 5 Mar 2025 20:17:02 -0800 Subject: [PATCH 02/56] update storage helper --- splitio/util/storage_helper.py | 28 ++++++++++++++ tests/storage/test_inmemory_storage.py | 53 ++++++++++++++++++++++++++ 2 files changed, 81 insertions(+) diff --git a/splitio/util/storage_helper.py b/splitio/util/storage_helper.py index b09a9f4e..f547a701 100644 --- 
a/splitio/util/storage_helper.py +++ b/splitio/util/storage_helper.py @@ -88,6 +88,34 @@ async def update_feature_flag_storage_async(feature_flag_storage, feature_flags, await feature_flag_storage.update(to_add, to_delete, change_number) return segment_list +async def update_rule_based_segment_storage_async(rule_based_segment_storage, rule_based_segments, change_number): + """ + Update rule based segment storage from given list of rule based segments + + :param rule_based_segment_storage: rule based segment storage instance + :type rule_based_segment_storage: splitio.storage.RuleBasedSegmentStorage + :param rule_based_segments: rule based segment instance to validate. + :type rule_based_segments: splitio.models.rule_based_segments.RuleBasedSegment + :param: last change number + :type: int + + :return: segments list from excluded segments list + :rtype: list(str) + """ + segment_list = set() + to_add = [] + to_delete = [] + for rule_based_segment in rule_based_segments: + if rule_based_segment.status == "ACTIVE": + to_add.append(rule_based_segment) + segment_list.update(set(rule_based_segment.excluded.get_excluded_segments())) + else: + if await rule_based_segment_storage.get(rule_based_segment.name) is not None: + to_delete.append(rule_based_segment.name) + + await rule_based_segment_storage.update(to_add, to_delete, change_number) + return segment_list + def get_valid_flag_sets(flag_sets, flag_set_filter): """ Check each flag set in given array, return it if exist in a given config flag set array, if config array is empty return all diff --git a/tests/storage/test_inmemory_storage.py b/tests/storage/test_inmemory_storage.py index 1bf2f3de..9c5b6ed2 100644 --- a/tests/storage/test_inmemory_storage.py +++ b/tests/storage/test_inmemory_storage.py @@ -2,6 +2,7 @@ # pylint: disable=no-self-use import random import pytest +import copy from splitio.models.splits import Split from splitio.models.segments import Segment @@ -13,6 +14,7 @@ InMemoryImpressionStorage, 
InMemoryEventStorage, InMemoryTelemetryStorage, InMemoryImpressionStorageAsync, InMemoryEventStorageAsync, \ InMemoryTelemetryStorageAsync, FlagSets, InMemoryRuleBasedSegmentStorage, InMemoryRuleBasedSegmentStorageAsync from splitio.models.rule_based_segments import RuleBasedSegment +from splitio.models import rule_based_segments class FlagSetsFilterTests(object): """Flag sets filter storage tests.""" @@ -1840,6 +1842,31 @@ def test_store_get_changenumber(self): assert storage.get_change_number() == -1 storage.update([], [], 5) assert storage.get_change_number() == 5 + + def test_contains(self): + raw = { + "changeNumber": 123, + "name": "segment1", + "status": "ACTIVE", + "trafficTypeName": "user", + "excluded":{ + "keys":[], + "segments":[] + }, + "conditions": [] + } + segment1 = rule_based_segments.from_raw(raw) + raw2 = copy.deepcopy(raw) + raw2["name"] = "segment2" + segment2 = rule_based_segments.from_raw(raw2) + raw3 = copy.deepcopy(raw) + raw3["name"] = "segment3" + segment3 = rule_based_segments.from_raw(raw3) + storage = InMemoryRuleBasedSegmentStorage() + storage.update([segment1, segment2, segment3], [], -1) + assert storage.contains(["segment1"]) + assert storage.contains(["segment1", "segment3"]) + assert not storage.contains(["segment5"]) class InMemoryRuleBasedSegmentStorageAsyncTests(object): """In memory rule based segment storage test cases.""" @@ -1874,3 +1901,29 @@ async def test_store_get_changenumber(self): assert await storage.get_change_number() == -1 await storage.update([], [], 5) assert await storage.get_change_number() == 5 + + @pytest.mark.asyncio + async def test_contains(self): + raw = { + "changeNumber": 123, + "name": "segment1", + "status": "ACTIVE", + "trafficTypeName": "user", + "excluded":{ + "keys":[], + "segments":[] + }, + "conditions": [] + } + segment1 = rule_based_segments.from_raw(raw) + raw2 = copy.deepcopy(raw) + raw2["name"] = "segment2" + segment2 = rule_based_segments.from_raw(raw2) + raw3 = copy.deepcopy(raw) + 
raw3["name"] = "segment3" + segment3 = rule_based_segments.from_raw(raw3) + storage = InMemoryRuleBasedSegmentStorageAsync() + await storage.update([segment1, segment2, segment3], [], -1) + assert await storage.contains(["segment1"]) + assert await storage.contains(["segment1", "segment3"]) + assert not await storage.contains(["segment5"]) From c07651e1ae432f796e944f1b8540ac159a412e25 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Thu, 6 Mar 2025 10:27:56 -0800 Subject: [PATCH 03/56] polish --- splitio/storage/__init__.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/splitio/storage/__init__.py b/splitio/storage/__init__.py index 9178398a..079ee863 100644 --- a/splitio/storage/__init__.py +++ b/splitio/storage/__init__.py @@ -397,12 +397,12 @@ def get_change_number(self): @abc.abstractmethod def contains(self, segment_names): """ - Return whether the traffic type exists in at least one rule based segment in cache. + Return whether the segments exists in rule based segment in cache. - :param traffic_type_name: Traffic type to validate. - :type traffic_type_name: str + :param segment_names: segment name to validate. + :type segment_names: str - :return: True if the traffic type is valid. False otherwise. + :return: True if segment names exists. False otherwise. 
:rtype: bool """ pass From 06a84f76469f2d83c6b10688b0593043faf78d42 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Fri, 7 Mar 2025 17:25:14 -0300 Subject: [PATCH 04/56] update evaluator --- splitio/client/client.py | 4 +- splitio/engine/evaluator.py | 81 +++++++++++++--- tests/engine/test_evaluator.py | 169 +++++++++++++++++++++++++++++++-- 3 files changed, 229 insertions(+), 25 deletions(-) diff --git a/splitio/client/client.py b/splitio/client/client.py index d4c37fa4..8e71030e 100644 --- a/splitio/client/client.py +++ b/splitio/client/client.py @@ -201,7 +201,7 @@ def __init__(self, factory, recorder, labels_enabled=True): :rtype: Client """ ClientBase.__init__(self, factory, recorder, labels_enabled) - self._context_factory = EvaluationDataFactory(factory._get_storage('splits'), factory._get_storage('segments')) + self._context_factory = EvaluationDataFactory(factory._get_storage('splits'), factory._get_storage('segments'), factory._get_storage('rule_based_segments')) def destroy(self): """ @@ -668,7 +668,7 @@ def __init__(self, factory, recorder, labels_enabled=True): :rtype: Client """ ClientBase.__init__(self, factory, recorder, labels_enabled) - self._context_factory = AsyncEvaluationDataFactory(factory._get_storage('splits'), factory._get_storage('segments')) + self._context_factory = AsyncEvaluationDataFactory(factory._get_storage('splits'), factory._get_storage('segments'), factory._get_storage('rule_based_segments')) async def destroy(self): """ diff --git a/splitio/engine/evaluator.py b/splitio/engine/evaluator.py index f913ebba..80a75eec 100644 --- a/splitio/engine/evaluator.py +++ b/splitio/engine/evaluator.py @@ -6,10 +6,11 @@ from splitio.models.grammar.condition import ConditionType from splitio.models.grammar.matchers.misc import DependencyMatcher from splitio.models.grammar.matchers.keys import UserDefinedSegmentMatcher +from splitio.models.grammar.matchers.rule_based_segment import RuleBasedSegmentMatcher from splitio.optional.loaders 
import asyncio CONTROL = 'control' -EvaluationContext = namedtuple('EvaluationContext', ['flags', 'segment_memberships']) +EvaluationContext = namedtuple('EvaluationContext', ['flags', 'segment_memberships', 'segment_rbs_memberships', 'segment_rbs_conditions']) _LOGGER = logging.getLogger(__name__) @@ -98,9 +99,10 @@ def _treatment_for_flag(self, flag, key, bucketing, attributes, ctx): class EvaluationDataFactory: - def __init__(self, split_storage, segment_storage): + def __init__(self, split_storage, segment_storage, rbs_segment_storage): self._flag_storage = split_storage self._segment_storage = segment_storage + self._rbs_segment_storage = rbs_segment_storage def context_for(self, key, feature_names): """ @@ -114,28 +116,50 @@ def context_for(self, key, feature_names): pending = set(feature_names) splits = {} pending_memberships = set() + pending_rbs_memberships = set() while pending: fetched = self._flag_storage.fetch_many(list(pending)) features = filter_missing(fetched) splits.update(features) pending = set() for feature in features.values(): - cf, cs = get_dependencies(feature) + cf, cs, crbs = get_dependencies(feature) pending.update(filter(lambda f: f not in splits, cf)) pending_memberships.update(cs) - - return EvaluationContext(splits, { - segment: self._segment_storage.segment_contains(segment, key) - for segment in pending_memberships - }) - + pending_rbs_memberships.update(crbs) + + rbs_segment_memberships = {} + rbs_segment_conditions = {} + key_membership = False + segment_memberhsip = False + for rbs_segment in pending_rbs_memberships: + key_membership = key in self._rbs_segment_storage.get(rbs_segment).excluded.get_excluded_keys() + segment_memberhsip = False + for segment_name in self._rbs_segment_storage.get(rbs_segment).excluded.get_excluded_segments(): + if self._segment_storage.segment_contains(segment_name, key): + segment_memberhsip = True + break + + rbs_segment_memberships.update({rbs_segment: segment_memberhsip or key_membership}) + if 
not (segment_memberhsip or key_membership): + rbs_segment_conditions.update({rbs_segment: [condition for condition in self._rbs_segment_storage.get(rbs_segment).conditions]}) + + return EvaluationContext( + splits, + { segment: self._segment_storage.segment_contains(segment, key) + for segment in pending_memberships + }, + rbs_segment_memberships, + rbs_segment_conditions + ) class AsyncEvaluationDataFactory: - def __init__(self, split_storage, segment_storage): + def __init__(self, split_storage, segment_storage, rbs_segment_storage): self._flag_storage = split_storage self._segment_storage = segment_storage - + self._rbs_segment_storage = rbs_segment_storage + async def context_for(self, key, feature_names): """ Recursively iterate & fetch all data required to evaluate these flags. @@ -148,23 +172,47 @@ async def context_for(self, key, feature_names): pending = set(feature_names) splits = {} pending_memberships = set() + pending_rbs_memberships = set() while pending: fetched = await self._flag_storage.fetch_many(list(pending)) features = filter_missing(fetched) splits.update(features) pending = set() for feature in features.values(): - cf, cs = get_dependencies(feature) + cf, cs, crbs = get_dependencies(feature) pending.update(filter(lambda f: f not in splits, cf)) pending_memberships.update(cs) - + pending_rbs_memberships.update(crbs) + segment_names = list(pending_memberships) segment_memberships = await asyncio.gather(*[ self._segment_storage.segment_contains(segment, key) for segment in segment_names ]) - return EvaluationContext(splits, dict(zip(segment_names, segment_memberships))) + rbs_segment_memberships = {} + rbs_segment_conditions = {} + key_membership = False + segment_memberhsip = False + for rbs_segment in pending_rbs_memberships: + rbs_segment_obj = await self._rbs_segment_storage.get(rbs_segment) + key_membership = key in rbs_segment_obj.excluded.get_excluded_keys() + segment_memberhsip = False + for segment_name in 
rbs_segment_obj.excluded.get_excluded_segments(): + if await self._segment_storage.segment_contains(segment_name, key): + segment_memberhsip = True + break + + rbs_segment_memberships.update({rbs_segment: segment_memberhsip or key_membership}) + if not (segment_memberhsip or key_membership): + rbs_segment_conditions.update({rbs_segment: [condition for condition in rbs_segment_obj.conditions]}) + + return EvaluationContext( + splits, + dict(zip(segment_names, segment_memberships)), + rbs_segment_memberships, + rbs_segment_conditions + ) def get_dependencies(feature): @@ -173,14 +221,17 @@ def get_dependencies(feature): """ feature_names = [] segment_names = [] + rbs_segment_names = [] for condition in feature.conditions: for matcher in condition.matchers: + if isinstance(matcher,RuleBasedSegmentMatcher): + rbs_segment_names.append(matcher._rbs_segment_name) if isinstance(matcher,UserDefinedSegmentMatcher): segment_names.append(matcher._segment_name) elif isinstance(matcher, DependencyMatcher): feature_names.append(matcher._split_name) - return feature_names, segment_names + return feature_names, segment_names, rbs_segment_names def filter_missing(features): return {k: v for (k, v) in features.items() if v is not None} diff --git a/tests/engine/test_evaluator.py b/tests/engine/test_evaluator.py index 67c7387d..6268ad1d 100644 --- a/tests/engine/test_evaluator.py +++ b/tests/engine/test_evaluator.py @@ -2,12 +2,108 @@ import logging import pytest -from splitio.models.splits import Split +from splitio.models.splits import Split, Status from splitio.models.grammar.condition import Condition, ConditionType from splitio.models.impressions import Label +from splitio.models.grammar import condition +from splitio.models import rule_based_segments from splitio.engine import evaluator, splitters from splitio.engine.evaluator import EvaluationContext +from splitio.storage.inmemmory import InMemorySplitStorage, InMemorySegmentStorage, InMemoryRuleBasedSegmentStorage, \ + 
InMemorySplitStorageAsync, InMemorySegmentStorageAsync, InMemoryRuleBasedSegmentStorageAsync +from splitio.engine.evaluator import EvaluationDataFactory, AsyncEvaluationDataFactory +rbs_raw = { + "changeNumber": 123, + "name": "sample_rule_based_segment", + "status": "ACTIVE", + "trafficTypeName": "user", + "excluded":{ + "keys":["mauro@split.io","gaston@split.io"], + "segments":[] + }, + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": "email" + }, + "matcherType": "ENDS_WITH", + "negate": False, + "whitelistMatcherData": { + "whitelist": [ + "@split.io" + ] + } + } + ] + } + } + ] +} + +split_conditions = [ + condition.from_raw({ + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user" + }, + "matcherType": "IN_RULE_BASED_SEGMENT", + "negate": False, + "userDefinedSegmentMatcherData": { + "segmentName": "sample_rule_based_segment" + } + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + }, + { + "treatment": "off", + "size": 0 + } + ], + "label": "in rule based segment sample_rule_based_segment" + }), + condition.from_raw({ + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user" + }, + "matcherType": "ALL_KEYS", + "negate": False + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 0 + }, + { + "treatment": "off", + "size": 100 + } + ], + "label": "default rule" + }) +] + class EvaluatorTests(object): """Test evaluator behavior.""" @@ -27,7 +123,7 @@ def test_evaluate_treatment_killed_split(self, mocker): mocked_split.killed = True mocked_split.change_number = 123 mocked_split.get_configurations_for.return_value = '{"some_property": 123}' - ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set()) + ctx = EvaluationContext(flags={'some': mocked_split}, 
segment_memberships=set(), segment_rbs_memberships={}, segment_rbs_conditions={}) result = e.eval_with_context('some_key', 'some_bucketing_key', 'some', {}, ctx) assert result['treatment'] == 'off' assert result['configurations'] == '{"some_property": 123}' @@ -45,7 +141,7 @@ def test_evaluate_treatment_ok(self, mocker): mocked_split.killed = False mocked_split.change_number = 123 mocked_split.get_configurations_for.return_value = '{"some_property": 123}' - ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set()) + ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={}, segment_rbs_conditions={}) result = e.eval_with_context('some_key', 'some_bucketing_key', 'some', {}, ctx) assert result['treatment'] == 'on' assert result['configurations'] == '{"some_property": 123}' @@ -54,7 +150,6 @@ def test_evaluate_treatment_ok(self, mocker): assert mocked_split.get_configurations_for.mock_calls == [mocker.call('on')] assert result['impressions_disabled'] == mocked_split.impressions_disabled - def test_evaluate_treatment_ok_no_config(self, mocker): """Test that a killed split returns the default treatment.""" e = self._build_evaluator_with_mocks(mocker) @@ -65,7 +160,7 @@ def test_evaluate_treatment_ok_no_config(self, mocker): mocked_split.killed = False mocked_split.change_number = 123 mocked_split.get_configurations_for.return_value = None - ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set()) + ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={}, segment_rbs_conditions={}) result = e.eval_with_context('some_key', 'some_bucketing_key', 'some', {}, ctx) assert result['treatment'] == 'on' assert result['configurations'] == None @@ -92,7 +187,7 @@ def test_evaluate_treatments(self, mocker): mocked_split2.change_number = 123 mocked_split2.get_configurations_for.return_value = None - ctx = 
EvaluationContext(flags={'feature2': mocked_split, 'feature4': mocked_split2}, segment_memberships=set()) + ctx = EvaluationContext(flags={'feature2': mocked_split, 'feature4': mocked_split2}, segment_memberships=set(), segment_rbs_memberships={}, segment_rbs_conditions={}) results = e.eval_many_with_context('some_key', 'some_bucketing_key', ['feature2', 'feature4'], {}, ctx) result = results['feature4'] assert result['configurations'] == None @@ -115,7 +210,7 @@ def test_get_gtreatment_for_split_no_condition_matches(self, mocker): mocked_split.change_number = '123' mocked_split.conditions = [] mocked_split.get_configurations_for = None - ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set()) + ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={}, segment_rbs_conditions={}) assert e._treatment_for_flag(mocked_split, 'some_key', 'some_bucketing', {}, ctx) == ( 'off', Label.NO_CONDITION_MATCHED @@ -132,6 +227,64 @@ def test_get_gtreatment_for_split_non_rollout(self, mocker): mocked_split = mocker.Mock(spec=Split) mocked_split.killed = False mocked_split.conditions = [mocked_condition_1] - treatment, label = e._treatment_for_flag(mocked_split, 'some_key', 'some_bucketing', {}, EvaluationContext(None, None)) + treatment, label = e._treatment_for_flag(mocked_split, 'some_key', 'some_bucketing', {}, EvaluationContext(None, None, None, None)) assert treatment == 'on' assert label == 'some_label' + + def test_evaluate_treatment_with_rule_based_segment(self, mocker): + """Test that a non-killed split returns the appropriate treatment.""" + e = evaluator.Evaluator(splitters.Splitter()) + + mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False) + ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={'sample_rule_based_segment': False}, 
segment_rbs_conditions={'sample_rule_based_segment': rule_based_segments.from_raw(rbs_raw).conditions}) + result = e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx) + assert result['treatment'] == 'on' + + ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={'sample_rule_based_segment': True}, segment_rbs_conditions={'sample_rule_based_segment': []}) + result = e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx) + assert result['treatment'] == 'off' + +class EvaluationDataFactoryTests(object): + """Test evaluation factory class.""" + + def test_get_context(self): + """Test context.""" + mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False) + flag_storage = InMemorySplitStorage([]) + segment_storage = InMemorySegmentStorage() + rbs_segment_storage = InMemoryRuleBasedSegmentStorage() + flag_storage.update([mocked_split], [], -1) + rbs = rule_based_segments.from_raw(rbs_raw) + rbs_segment_storage.update([rbs], [], -1) + + eval_factory = EvaluationDataFactory(flag_storage, segment_storage, rbs_segment_storage) + ec = eval_factory.context_for('bilal@split.io', ['some']) + assert ec.segment_rbs_conditions == {'sample_rule_based_segment': rbs.conditions} + assert ec.segment_rbs_memberships == {'sample_rule_based_segment': False} + + ec = eval_factory.context_for('mauro@split.io', ['some']) + assert ec.segment_rbs_conditions == {} + assert ec.segment_rbs_memberships == {'sample_rule_based_segment': True} + +class EvaluationDataFactoryAsyncTests(object): + """Test evaluation factory class.""" + + @pytest.mark.asyncio + async def test_get_context(self): + """Test context.""" + mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False) + flag_storage = InMemorySplitStorageAsync([]) + 
segment_storage = InMemorySegmentStorageAsync() + rbs_segment_storage = InMemoryRuleBasedSegmentStorageAsync() + await flag_storage.update([mocked_split], [], -1) + rbs = rule_based_segments.from_raw(rbs_raw) + await rbs_segment_storage.update([rbs], [], -1) + + eval_factory = AsyncEvaluationDataFactory(flag_storage, segment_storage, rbs_segment_storage) + ec = await eval_factory.context_for('bilal@split.io', ['some']) + assert ec.segment_rbs_conditions == {'sample_rule_based_segment': rbs.conditions} + assert ec.segment_rbs_memberships == {'sample_rule_based_segment': False} + + ec = await eval_factory.context_for('mauro@split.io', ['some']) + assert ec.segment_rbs_conditions == {} + assert ec.segment_rbs_memberships == {'sample_rule_based_segment': True} From 8228d942776e88ecd0f12d42553262ab386ee2f3 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Fri, 7 Mar 2025 17:29:44 -0300 Subject: [PATCH 05/56] Revert "update evaluator" This reverts commit 06a84f76469f2d83c6b10688b0593043faf78d42. 
--- splitio/client/client.py | 4 +- splitio/engine/evaluator.py | 81 +++------------- tests/engine/test_evaluator.py | 169 ++------------------------------- 3 files changed, 25 insertions(+), 229 deletions(-) diff --git a/splitio/client/client.py b/splitio/client/client.py index 8e71030e..d4c37fa4 100644 --- a/splitio/client/client.py +++ b/splitio/client/client.py @@ -201,7 +201,7 @@ def __init__(self, factory, recorder, labels_enabled=True): :rtype: Client """ ClientBase.__init__(self, factory, recorder, labels_enabled) - self._context_factory = EvaluationDataFactory(factory._get_storage('splits'), factory._get_storage('segments'), factory._get_storage('rule_based_segments')) + self._context_factory = EvaluationDataFactory(factory._get_storage('splits'), factory._get_storage('segments')) def destroy(self): """ @@ -668,7 +668,7 @@ def __init__(self, factory, recorder, labels_enabled=True): :rtype: Client """ ClientBase.__init__(self, factory, recorder, labels_enabled) - self._context_factory = AsyncEvaluationDataFactory(factory._get_storage('splits'), factory._get_storage('segments'), factory._get_storage('rule_based_segments')) + self._context_factory = AsyncEvaluationDataFactory(factory._get_storage('splits'), factory._get_storage('segments')) async def destroy(self): """ diff --git a/splitio/engine/evaluator.py b/splitio/engine/evaluator.py index 80a75eec..f913ebba 100644 --- a/splitio/engine/evaluator.py +++ b/splitio/engine/evaluator.py @@ -6,11 +6,10 @@ from splitio.models.grammar.condition import ConditionType from splitio.models.grammar.matchers.misc import DependencyMatcher from splitio.models.grammar.matchers.keys import UserDefinedSegmentMatcher -from splitio.models.grammar.matchers.rule_based_segment import RuleBasedSegmentMatcher from splitio.optional.loaders import asyncio CONTROL = 'control' -EvaluationContext = namedtuple('EvaluationContext', ['flags', 'segment_memberships', 'segment_rbs_memberships', 'segment_rbs_conditions']) +EvaluationContext = 
namedtuple('EvaluationContext', ['flags', 'segment_memberships']) _LOGGER = logging.getLogger(__name__) @@ -99,10 +98,9 @@ def _treatment_for_flag(self, flag, key, bucketing, attributes, ctx): class EvaluationDataFactory: - def __init__(self, split_storage, segment_storage, rbs_segment_storage): + def __init__(self, split_storage, segment_storage): self._flag_storage = split_storage self._segment_storage = segment_storage - self._rbs_segment_storage = rbs_segment_storage def context_for(self, key, feature_names): """ @@ -116,50 +114,28 @@ def context_for(self, key, feature_names): pending = set(feature_names) splits = {} pending_memberships = set() - pending_rbs_memberships = set() while pending: fetched = self._flag_storage.fetch_many(list(pending)) features = filter_missing(fetched) splits.update(features) pending = set() for feature in features.values(): - cf, cs, crbs = get_dependencies(feature) + cf, cs = get_dependencies(feature) pending.update(filter(lambda f: f not in splits, cf)) pending_memberships.update(cs) - pending_rbs_memberships.update(crbs) - - rbs_segment_memberships = {} - rbs_segment_conditions = {} - key_membership = False - segment_memberhsip = False - for rbs_segment in pending_rbs_memberships: - key_membership = key in self._rbs_segment_storage.get(rbs_segment).excluded.get_excluded_keys() - segment_memberhsip = False - for segment_name in self._rbs_segment_storage.get(rbs_segment).excluded.get_excluded_segments(): - if self._segment_storage.segment_contains(segment_name, key): - segment_memberhsip = True - break - - rbs_segment_memberships.update({rbs_segment: segment_memberhsip or key_membership}) - if not (segment_memberhsip or key_membership): - rbs_segment_conditions.update({rbs_segment: [condition for condition in self._rbs_segment_storage.get(rbs_segment).conditions]}) - - return EvaluationContext( - splits, - { segment: self._segment_storage.segment_contains(segment, key) - for segment in pending_memberships - }, - 
rbs_segment_memberships, - rbs_segment_conditions - ) + + return EvaluationContext(splits, { + segment: self._segment_storage.segment_contains(segment, key) + for segment in pending_memberships + }) + class AsyncEvaluationDataFactory: - def __init__(self, split_storage, segment_storage, rbs_segment_storage): + def __init__(self, split_storage, segment_storage): self._flag_storage = split_storage self._segment_storage = segment_storage - self._rbs_segment_storage = rbs_segment_storage - + async def context_for(self, key, feature_names): """ Recursively iterate & fetch all data required to evaluate these flags. @@ -172,47 +148,23 @@ async def context_for(self, key, feature_names): pending = set(feature_names) splits = {} pending_memberships = set() - pending_rbs_memberships = set() while pending: fetched = await self._flag_storage.fetch_many(list(pending)) features = filter_missing(fetched) splits.update(features) pending = set() for feature in features.values(): - cf, cs, crbs = get_dependencies(feature) + cf, cs = get_dependencies(feature) pending.update(filter(lambda f: f not in splits, cf)) pending_memberships.update(cs) - pending_rbs_memberships.update(crbs) - + segment_names = list(pending_memberships) segment_memberships = await asyncio.gather(*[ self._segment_storage.segment_contains(segment, key) for segment in segment_names ]) - rbs_segment_memberships = {} - rbs_segment_conditions = {} - key_membership = False - segment_memberhsip = False - for rbs_segment in pending_rbs_memberships: - rbs_segment_obj = await self._rbs_segment_storage.get(rbs_segment) - key_membership = key in rbs_segment_obj.excluded.get_excluded_keys() - segment_memberhsip = False - for segment_name in rbs_segment_obj.excluded.get_excluded_segments(): - if await self._segment_storage.segment_contains(segment_name, key): - segment_memberhsip = True - break - - rbs_segment_memberships.update({rbs_segment: segment_memberhsip or key_membership}) - if not (segment_memberhsip or 
key_membership): - rbs_segment_conditions.update({rbs_segment: [condition for condition in rbs_segment_obj.conditions]}) - - return EvaluationContext( - splits, - dict(zip(segment_names, segment_memberships)), - rbs_segment_memberships, - rbs_segment_conditions - ) + return EvaluationContext(splits, dict(zip(segment_names, segment_memberships))) def get_dependencies(feature): @@ -221,17 +173,14 @@ def get_dependencies(feature): """ feature_names = [] segment_names = [] - rbs_segment_names = [] for condition in feature.conditions: for matcher in condition.matchers: - if isinstance(matcher,RuleBasedSegmentMatcher): - rbs_segment_names.append(matcher._rbs_segment_name) if isinstance(matcher,UserDefinedSegmentMatcher): segment_names.append(matcher._segment_name) elif isinstance(matcher, DependencyMatcher): feature_names.append(matcher._split_name) - return feature_names, segment_names, rbs_segment_names + return feature_names, segment_names def filter_missing(features): return {k: v for (k, v) in features.items() if v is not None} diff --git a/tests/engine/test_evaluator.py b/tests/engine/test_evaluator.py index 6268ad1d..67c7387d 100644 --- a/tests/engine/test_evaluator.py +++ b/tests/engine/test_evaluator.py @@ -2,108 +2,12 @@ import logging import pytest -from splitio.models.splits import Split, Status +from splitio.models.splits import Split from splitio.models.grammar.condition import Condition, ConditionType from splitio.models.impressions import Label -from splitio.models.grammar import condition -from splitio.models import rule_based_segments from splitio.engine import evaluator, splitters from splitio.engine.evaluator import EvaluationContext -from splitio.storage.inmemmory import InMemorySplitStorage, InMemorySegmentStorage, InMemoryRuleBasedSegmentStorage, \ - InMemorySplitStorageAsync, InMemorySegmentStorageAsync, InMemoryRuleBasedSegmentStorageAsync -from splitio.engine.evaluator import EvaluationDataFactory, AsyncEvaluationDataFactory -rbs_raw = { - 
"changeNumber": 123, - "name": "sample_rule_based_segment", - "status": "ACTIVE", - "trafficTypeName": "user", - "excluded":{ - "keys":["mauro@split.io","gaston@split.io"], - "segments":[] - }, - "conditions": [ - { - "matcherGroup": { - "combiner": "AND", - "matchers": [ - { - "keySelector": { - "trafficType": "user", - "attribute": "email" - }, - "matcherType": "ENDS_WITH", - "negate": False, - "whitelistMatcherData": { - "whitelist": [ - "@split.io" - ] - } - } - ] - } - } - ] -} - -split_conditions = [ - condition.from_raw({ - "conditionType": "ROLLOUT", - "matcherGroup": { - "combiner": "AND", - "matchers": [ - { - "keySelector": { - "trafficType": "user" - }, - "matcherType": "IN_RULE_BASED_SEGMENT", - "negate": False, - "userDefinedSegmentMatcherData": { - "segmentName": "sample_rule_based_segment" - } - } - ] - }, - "partitions": [ - { - "treatment": "on", - "size": 100 - }, - { - "treatment": "off", - "size": 0 - } - ], - "label": "in rule based segment sample_rule_based_segment" - }), - condition.from_raw({ - "conditionType": "ROLLOUT", - "matcherGroup": { - "combiner": "AND", - "matchers": [ - { - "keySelector": { - "trafficType": "user" - }, - "matcherType": "ALL_KEYS", - "negate": False - } - ] - }, - "partitions": [ - { - "treatment": "on", - "size": 0 - }, - { - "treatment": "off", - "size": 100 - } - ], - "label": "default rule" - }) -] - class EvaluatorTests(object): """Test evaluator behavior.""" @@ -123,7 +27,7 @@ def test_evaluate_treatment_killed_split(self, mocker): mocked_split.killed = True mocked_split.change_number = 123 mocked_split.get_configurations_for.return_value = '{"some_property": 123}' - ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={}, segment_rbs_conditions={}) + ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set()) result = e.eval_with_context('some_key', 'some_bucketing_key', 'some', {}, ctx) assert result['treatment'] == 'off' assert 
result['configurations'] == '{"some_property": 123}' @@ -141,7 +45,7 @@ def test_evaluate_treatment_ok(self, mocker): mocked_split.killed = False mocked_split.change_number = 123 mocked_split.get_configurations_for.return_value = '{"some_property": 123}' - ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={}, segment_rbs_conditions={}) + ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set()) result = e.eval_with_context('some_key', 'some_bucketing_key', 'some', {}, ctx) assert result['treatment'] == 'on' assert result['configurations'] == '{"some_property": 123}' @@ -150,6 +54,7 @@ def test_evaluate_treatment_ok(self, mocker): assert mocked_split.get_configurations_for.mock_calls == [mocker.call('on')] assert result['impressions_disabled'] == mocked_split.impressions_disabled + def test_evaluate_treatment_ok_no_config(self, mocker): """Test that a killed split returns the default treatment.""" e = self._build_evaluator_with_mocks(mocker) @@ -160,7 +65,7 @@ def test_evaluate_treatment_ok_no_config(self, mocker): mocked_split.killed = False mocked_split.change_number = 123 mocked_split.get_configurations_for.return_value = None - ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={}, segment_rbs_conditions={}) + ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set()) result = e.eval_with_context('some_key', 'some_bucketing_key', 'some', {}, ctx) assert result['treatment'] == 'on' assert result['configurations'] == None @@ -187,7 +92,7 @@ def test_evaluate_treatments(self, mocker): mocked_split2.change_number = 123 mocked_split2.get_configurations_for.return_value = None - ctx = EvaluationContext(flags={'feature2': mocked_split, 'feature4': mocked_split2}, segment_memberships=set(), segment_rbs_memberships={}, segment_rbs_conditions={}) + ctx = EvaluationContext(flags={'feature2': mocked_split, 'feature4': 
mocked_split2}, segment_memberships=set()) results = e.eval_many_with_context('some_key', 'some_bucketing_key', ['feature2', 'feature4'], {}, ctx) result = results['feature4'] assert result['configurations'] == None @@ -210,7 +115,7 @@ def test_get_gtreatment_for_split_no_condition_matches(self, mocker): mocked_split.change_number = '123' mocked_split.conditions = [] mocked_split.get_configurations_for = None - ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={}, segment_rbs_conditions={}) + ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set()) assert e._treatment_for_flag(mocked_split, 'some_key', 'some_bucketing', {}, ctx) == ( 'off', Label.NO_CONDITION_MATCHED @@ -227,64 +132,6 @@ def test_get_gtreatment_for_split_non_rollout(self, mocker): mocked_split = mocker.Mock(spec=Split) mocked_split.killed = False mocked_split.conditions = [mocked_condition_1] - treatment, label = e._treatment_for_flag(mocked_split, 'some_key', 'some_bucketing', {}, EvaluationContext(None, None, None, None)) + treatment, label = e._treatment_for_flag(mocked_split, 'some_key', 'some_bucketing', {}, EvaluationContext(None, None)) assert treatment == 'on' assert label == 'some_label' - - def test_evaluate_treatment_with_rule_based_segment(self, mocker): - """Test that a non-killed split returns the appropriate treatment.""" - e = evaluator.Evaluator(splitters.Splitter()) - - mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False) - ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={'sample_rule_based_segment': False}, segment_rbs_conditions={'sample_rule_based_segment': rule_based_segments.from_raw(rbs_raw).conditions}) - result = e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx) - assert result['treatment'] == 'on' - - ctx = 
EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={'sample_rule_based_segment': True}, segment_rbs_conditions={'sample_rule_based_segment': []}) - result = e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx) - assert result['treatment'] == 'off' - -class EvaluationDataFactoryTests(object): - """Test evaluation factory class.""" - - def test_get_context(self): - """Test context.""" - mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False) - flag_storage = InMemorySplitStorage([]) - segment_storage = InMemorySegmentStorage() - rbs_segment_storage = InMemoryRuleBasedSegmentStorage() - flag_storage.update([mocked_split], [], -1) - rbs = rule_based_segments.from_raw(rbs_raw) - rbs_segment_storage.update([rbs], [], -1) - - eval_factory = EvaluationDataFactory(flag_storage, segment_storage, rbs_segment_storage) - ec = eval_factory.context_for('bilal@split.io', ['some']) - assert ec.segment_rbs_conditions == {'sample_rule_based_segment': rbs.conditions} - assert ec.segment_rbs_memberships == {'sample_rule_based_segment': False} - - ec = eval_factory.context_for('mauro@split.io', ['some']) - assert ec.segment_rbs_conditions == {} - assert ec.segment_rbs_memberships == {'sample_rule_based_segment': True} - -class EvaluationDataFactoryAsyncTests(object): - """Test evaluation factory class.""" - - @pytest.mark.asyncio - async def test_get_context(self): - """Test context.""" - mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False) - flag_storage = InMemorySplitStorageAsync([]) - segment_storage = InMemorySegmentStorageAsync() - rbs_segment_storage = InMemoryRuleBasedSegmentStorageAsync() - await flag_storage.update([mocked_split], [], -1) - rbs = rule_based_segments.from_raw(rbs_raw) - await rbs_segment_storage.update([rbs], [], -1) - - 
eval_factory = AsyncEvaluationDataFactory(flag_storage, segment_storage, rbs_segment_storage) - ec = await eval_factory.context_for('bilal@split.io', ['some']) - assert ec.segment_rbs_conditions == {'sample_rule_based_segment': rbs.conditions} - assert ec.segment_rbs_memberships == {'sample_rule_based_segment': False} - - ec = await eval_factory.context_for('mauro@split.io', ['some']) - assert ec.segment_rbs_conditions == {} - assert ec.segment_rbs_memberships == {'sample_rule_based_segment': True} From 7a143ccfc3ac1b69fbddad9c2f8f02c9e2686caf Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Fri, 7 Mar 2025 17:33:51 -0300 Subject: [PATCH 06/56] updated evaluator --- splitio/client/client.py | 4 +- splitio/engine/evaluator.py | 81 +++++++++++++--- tests/engine/test_evaluator.py | 169 +++++++++++++++++++++++++++++++-- 3 files changed, 229 insertions(+), 25 deletions(-) diff --git a/splitio/client/client.py b/splitio/client/client.py index d4c37fa4..8e71030e 100644 --- a/splitio/client/client.py +++ b/splitio/client/client.py @@ -201,7 +201,7 @@ def __init__(self, factory, recorder, labels_enabled=True): :rtype: Client """ ClientBase.__init__(self, factory, recorder, labels_enabled) - self._context_factory = EvaluationDataFactory(factory._get_storage('splits'), factory._get_storage('segments')) + self._context_factory = EvaluationDataFactory(factory._get_storage('splits'), factory._get_storage('segments'), factory._get_storage('rule_based_segments')) def destroy(self): """ @@ -668,7 +668,7 @@ def __init__(self, factory, recorder, labels_enabled=True): :rtype: Client """ ClientBase.__init__(self, factory, recorder, labels_enabled) - self._context_factory = AsyncEvaluationDataFactory(factory._get_storage('splits'), factory._get_storage('segments')) + self._context_factory = AsyncEvaluationDataFactory(factory._get_storage('splits'), factory._get_storage('segments'), factory._get_storage('rule_based_segments')) async def destroy(self): """ diff --git 
a/splitio/engine/evaluator.py b/splitio/engine/evaluator.py index f913ebba..80a75eec 100644 --- a/splitio/engine/evaluator.py +++ b/splitio/engine/evaluator.py @@ -6,10 +6,11 @@ from splitio.models.grammar.condition import ConditionType from splitio.models.grammar.matchers.misc import DependencyMatcher from splitio.models.grammar.matchers.keys import UserDefinedSegmentMatcher +from splitio.models.grammar.matchers.rule_based_segment import RuleBasedSegmentMatcher from splitio.optional.loaders import asyncio CONTROL = 'control' -EvaluationContext = namedtuple('EvaluationContext', ['flags', 'segment_memberships']) +EvaluationContext = namedtuple('EvaluationContext', ['flags', 'segment_memberships', 'segment_rbs_memberships', 'segment_rbs_conditions']) _LOGGER = logging.getLogger(__name__) @@ -98,9 +99,10 @@ def _treatment_for_flag(self, flag, key, bucketing, attributes, ctx): class EvaluationDataFactory: - def __init__(self, split_storage, segment_storage): + def __init__(self, split_storage, segment_storage, rbs_segment_storage): self._flag_storage = split_storage self._segment_storage = segment_storage + self._rbs_segment_storage = rbs_segment_storage def context_for(self, key, feature_names): """ @@ -114,28 +116,50 @@ def context_for(self, key, feature_names): pending = set(feature_names) splits = {} pending_memberships = set() + pending_rbs_memberships = set() while pending: fetched = self._flag_storage.fetch_many(list(pending)) features = filter_missing(fetched) splits.update(features) pending = set() for feature in features.values(): - cf, cs = get_dependencies(feature) + cf, cs, crbs = get_dependencies(feature) pending.update(filter(lambda f: f not in splits, cf)) pending_memberships.update(cs) - - return EvaluationContext(splits, { - segment: self._segment_storage.segment_contains(segment, key) - for segment in pending_memberships - }) - + pending_rbs_memberships.update(crbs) + + rbs_segment_memberships = {} + rbs_segment_conditions = {} + key_membership = 
False + segment_memberhsip = False + for rbs_segment in pending_rbs_memberships: + key_membership = key in self._rbs_segment_storage.get(rbs_segment).excluded.get_excluded_keys() + segment_memberhsip = False + for segment_name in self._rbs_segment_storage.get(rbs_segment).excluded.get_excluded_segments(): + if self._segment_storage.segment_contains(segment_name, key): + segment_memberhsip = True + break + + rbs_segment_memberships.update({rbs_segment: segment_memberhsip or key_membership}) + if not (segment_memberhsip or key_membership): + rbs_segment_conditions.update({rbs_segment: [condition for condition in self._rbs_segment_storage.get(rbs_segment).conditions]}) + + return EvaluationContext( + splits, + { segment: self._segment_storage.segment_contains(segment, key) + for segment in pending_memberships + }, + rbs_segment_memberships, + rbs_segment_conditions + ) class AsyncEvaluationDataFactory: - def __init__(self, split_storage, segment_storage): + def __init__(self, split_storage, segment_storage, rbs_segment_storage): self._flag_storage = split_storage self._segment_storage = segment_storage - + self._rbs_segment_storage = rbs_segment_storage + async def context_for(self, key, feature_names): """ Recursively iterate & fetch all data required to evaluate these flags. 
@@ -148,23 +172,47 @@ async def context_for(self, key, feature_names): pending = set(feature_names) splits = {} pending_memberships = set() + pending_rbs_memberships = set() while pending: fetched = await self._flag_storage.fetch_many(list(pending)) features = filter_missing(fetched) splits.update(features) pending = set() for feature in features.values(): - cf, cs = get_dependencies(feature) + cf, cs, crbs = get_dependencies(feature) pending.update(filter(lambda f: f not in splits, cf)) pending_memberships.update(cs) - + pending_rbs_memberships.update(crbs) + segment_names = list(pending_memberships) segment_memberships = await asyncio.gather(*[ self._segment_storage.segment_contains(segment, key) for segment in segment_names ]) - return EvaluationContext(splits, dict(zip(segment_names, segment_memberships))) + rbs_segment_memberships = {} + rbs_segment_conditions = {} + key_membership = False + segment_memberhsip = False + for rbs_segment in pending_rbs_memberships: + rbs_segment_obj = await self._rbs_segment_storage.get(rbs_segment) + key_membership = key in rbs_segment_obj.excluded.get_excluded_keys() + segment_memberhsip = False + for segment_name in rbs_segment_obj.excluded.get_excluded_segments(): + if await self._segment_storage.segment_contains(segment_name, key): + segment_memberhsip = True + break + + rbs_segment_memberships.update({rbs_segment: segment_memberhsip or key_membership}) + if not (segment_memberhsip or key_membership): + rbs_segment_conditions.update({rbs_segment: [condition for condition in rbs_segment_obj.conditions]}) + + return EvaluationContext( + splits, + dict(zip(segment_names, segment_memberships)), + rbs_segment_memberships, + rbs_segment_conditions + ) def get_dependencies(feature): @@ -173,14 +221,17 @@ def get_dependencies(feature): """ feature_names = [] segment_names = [] + rbs_segment_names = [] for condition in feature.conditions: for matcher in condition.matchers: + if isinstance(matcher,RuleBasedSegmentMatcher): + 
rbs_segment_names.append(matcher._rbs_segment_name) if isinstance(matcher,UserDefinedSegmentMatcher): segment_names.append(matcher._segment_name) elif isinstance(matcher, DependencyMatcher): feature_names.append(matcher._split_name) - return feature_names, segment_names + return feature_names, segment_names, rbs_segment_names def filter_missing(features): return {k: v for (k, v) in features.items() if v is not None} diff --git a/tests/engine/test_evaluator.py b/tests/engine/test_evaluator.py index 67c7387d..6268ad1d 100644 --- a/tests/engine/test_evaluator.py +++ b/tests/engine/test_evaluator.py @@ -2,12 +2,108 @@ import logging import pytest -from splitio.models.splits import Split +from splitio.models.splits import Split, Status from splitio.models.grammar.condition import Condition, ConditionType from splitio.models.impressions import Label +from splitio.models.grammar import condition +from splitio.models import rule_based_segments from splitio.engine import evaluator, splitters from splitio.engine.evaluator import EvaluationContext +from splitio.storage.inmemmory import InMemorySplitStorage, InMemorySegmentStorage, InMemoryRuleBasedSegmentStorage, \ + InMemorySplitStorageAsync, InMemorySegmentStorageAsync, InMemoryRuleBasedSegmentStorageAsync +from splitio.engine.evaluator import EvaluationDataFactory, AsyncEvaluationDataFactory +rbs_raw = { + "changeNumber": 123, + "name": "sample_rule_based_segment", + "status": "ACTIVE", + "trafficTypeName": "user", + "excluded":{ + "keys":["mauro@split.io","gaston@split.io"], + "segments":[] + }, + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": "email" + }, + "matcherType": "ENDS_WITH", + "negate": False, + "whitelistMatcherData": { + "whitelist": [ + "@split.io" + ] + } + } + ] + } + } + ] +} + +split_conditions = [ + condition.from_raw({ + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + 
"keySelector": { + "trafficType": "user" + }, + "matcherType": "IN_RULE_BASED_SEGMENT", + "negate": False, + "userDefinedSegmentMatcherData": { + "segmentName": "sample_rule_based_segment" + } + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + }, + { + "treatment": "off", + "size": 0 + } + ], + "label": "in rule based segment sample_rule_based_segment" + }), + condition.from_raw({ + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user" + }, + "matcherType": "ALL_KEYS", + "negate": False + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 0 + }, + { + "treatment": "off", + "size": 100 + } + ], + "label": "default rule" + }) +] + class EvaluatorTests(object): """Test evaluator behavior.""" @@ -27,7 +123,7 @@ def test_evaluate_treatment_killed_split(self, mocker): mocked_split.killed = True mocked_split.change_number = 123 mocked_split.get_configurations_for.return_value = '{"some_property": 123}' - ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set()) + ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={}, segment_rbs_conditions={}) result = e.eval_with_context('some_key', 'some_bucketing_key', 'some', {}, ctx) assert result['treatment'] == 'off' assert result['configurations'] == '{"some_property": 123}' @@ -45,7 +141,7 @@ def test_evaluate_treatment_ok(self, mocker): mocked_split.killed = False mocked_split.change_number = 123 mocked_split.get_configurations_for.return_value = '{"some_property": 123}' - ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set()) + ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={}, segment_rbs_conditions={}) result = e.eval_with_context('some_key', 'some_bucketing_key', 'some', {}, ctx) assert result['treatment'] == 'on' assert result['configurations'] == 
'{"some_property": 123}' @@ -54,7 +150,6 @@ def test_evaluate_treatment_ok(self, mocker): assert mocked_split.get_configurations_for.mock_calls == [mocker.call('on')] assert result['impressions_disabled'] == mocked_split.impressions_disabled - def test_evaluate_treatment_ok_no_config(self, mocker): """Test that a killed split returns the default treatment.""" e = self._build_evaluator_with_mocks(mocker) @@ -65,7 +160,7 @@ def test_evaluate_treatment_ok_no_config(self, mocker): mocked_split.killed = False mocked_split.change_number = 123 mocked_split.get_configurations_for.return_value = None - ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set()) + ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={}, segment_rbs_conditions={}) result = e.eval_with_context('some_key', 'some_bucketing_key', 'some', {}, ctx) assert result['treatment'] == 'on' assert result['configurations'] == None @@ -92,7 +187,7 @@ def test_evaluate_treatments(self, mocker): mocked_split2.change_number = 123 mocked_split2.get_configurations_for.return_value = None - ctx = EvaluationContext(flags={'feature2': mocked_split, 'feature4': mocked_split2}, segment_memberships=set()) + ctx = EvaluationContext(flags={'feature2': mocked_split, 'feature4': mocked_split2}, segment_memberships=set(), segment_rbs_memberships={}, segment_rbs_conditions={}) results = e.eval_many_with_context('some_key', 'some_bucketing_key', ['feature2', 'feature4'], {}, ctx) result = results['feature4'] assert result['configurations'] == None @@ -115,7 +210,7 @@ def test_get_gtreatment_for_split_no_condition_matches(self, mocker): mocked_split.change_number = '123' mocked_split.conditions = [] mocked_split.get_configurations_for = None - ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set()) + ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={}, 
segment_rbs_conditions={}) assert e._treatment_for_flag(mocked_split, 'some_key', 'some_bucketing', {}, ctx) == ( 'off', Label.NO_CONDITION_MATCHED @@ -132,6 +227,64 @@ def test_get_gtreatment_for_split_non_rollout(self, mocker): mocked_split = mocker.Mock(spec=Split) mocked_split.killed = False mocked_split.conditions = [mocked_condition_1] - treatment, label = e._treatment_for_flag(mocked_split, 'some_key', 'some_bucketing', {}, EvaluationContext(None, None)) + treatment, label = e._treatment_for_flag(mocked_split, 'some_key', 'some_bucketing', {}, EvaluationContext(None, None, None, None)) assert treatment == 'on' assert label == 'some_label' + + def test_evaluate_treatment_with_rule_based_segment(self, mocker): + """Test that a non-killed split returns the appropriate treatment.""" + e = evaluator.Evaluator(splitters.Splitter()) + + mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False) + ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={'sample_rule_based_segment': False}, segment_rbs_conditions={'sample_rule_based_segment': rule_based_segments.from_raw(rbs_raw).conditions}) + result = e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx) + assert result['treatment'] == 'on' + + ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={'sample_rule_based_segment': True}, segment_rbs_conditions={'sample_rule_based_segment': []}) + result = e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx) + assert result['treatment'] == 'off' + +class EvaluationDataFactoryTests(object): + """Test evaluation factory class.""" + + def test_get_context(self): + """Test context.""" + mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False) + 
flag_storage = InMemorySplitStorage([]) + segment_storage = InMemorySegmentStorage() + rbs_segment_storage = InMemoryRuleBasedSegmentStorage() + flag_storage.update([mocked_split], [], -1) + rbs = rule_based_segments.from_raw(rbs_raw) + rbs_segment_storage.update([rbs], [], -1) + + eval_factory = EvaluationDataFactory(flag_storage, segment_storage, rbs_segment_storage) + ec = eval_factory.context_for('bilal@split.io', ['some']) + assert ec.segment_rbs_conditions == {'sample_rule_based_segment': rbs.conditions} + assert ec.segment_rbs_memberships == {'sample_rule_based_segment': False} + + ec = eval_factory.context_for('mauro@split.io', ['some']) + assert ec.segment_rbs_conditions == {} + assert ec.segment_rbs_memberships == {'sample_rule_based_segment': True} + +class EvaluationDataFactoryAsyncTests(object): + """Test evaluation factory class.""" + + @pytest.mark.asyncio + async def test_get_context(self): + """Test context.""" + mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False) + flag_storage = InMemorySplitStorageAsync([]) + segment_storage = InMemorySegmentStorageAsync() + rbs_segment_storage = InMemoryRuleBasedSegmentStorageAsync() + await flag_storage.update([mocked_split], [], -1) + rbs = rule_based_segments.from_raw(rbs_raw) + await rbs_segment_storage.update([rbs], [], -1) + + eval_factory = AsyncEvaluationDataFactory(flag_storage, segment_storage, rbs_segment_storage) + ec = await eval_factory.context_for('bilal@split.io', ['some']) + assert ec.segment_rbs_conditions == {'sample_rule_based_segment': rbs.conditions} + assert ec.segment_rbs_memberships == {'sample_rule_based_segment': False} + + ec = await eval_factory.context_for('mauro@split.io', ['some']) + assert ec.segment_rbs_conditions == {} + assert ec.segment_rbs_memberships == {'sample_rule_based_segment': True} From 5bda502b3b14917cce7c7268d08a1624ab4f66d7 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Mon, 10 
Mar 2025 20:13:43 -0300 Subject: [PATCH 07/56] Updated sync and api classes --- splitio/api/commons.py | 20 +- splitio/api/splits.py | 14 +- splitio/sync/split.py | 103 +++--- tests/api/test_segments_api.py | 14 +- tests/api/test_splits_api.py | 28 +- tests/sync/test_splits_synchronizer.py | 431 +++++++++++++++++-------- 6 files changed, 417 insertions(+), 193 deletions(-) diff --git a/splitio/api/commons.py b/splitio/api/commons.py index 2ca75595..9dda1ee0 100644 --- a/splitio/api/commons.py +++ b/splitio/api/commons.py @@ -57,7 +57,7 @@ def record_telemetry(status_code, elapsed, metric_name, telemetry_runtime_produc class FetchOptions(object): """Fetch Options object.""" - def __init__(self, cache_control_headers=False, change_number=None, sets=None, spec=SPEC_VERSION): + def __init__(self, cache_control_headers=False, change_number=None, rbs_change_number=None, sets=None, spec=SPEC_VERSION): """ Class constructor. @@ -72,6 +72,7 @@ def __init__(self, cache_control_headers=False, change_number=None, sets=None, s """ self._cache_control_headers = cache_control_headers self._change_number = change_number + self._rbs_change_number = rbs_change_number self._sets = sets self._spec = spec @@ -85,6 +86,11 @@ def change_number(self): """Return change number.""" return self._change_number + @property + def rbs_change_number(self): + """Return change number.""" + return self._rbs_change_number + @property def sets(self): """Return sets.""" @@ -103,14 +109,19 @@ def __eq__(self, other): if self._change_number != other._change_number: return False + if self._rbs_change_number != other._rbs_change_number: + return False + if self._sets != other._sets: return False + if self._spec != other._spec: return False + return True -def build_fetch(change_number, fetch_options, metadata): +def build_fetch(change_number, fetch_options, metadata, rbs_change_number=None): """ Build fetch with new flags if that is the case. 
@@ -123,11 +134,16 @@ def build_fetch(change_number, fetch_options, metadata): :param metadata: Metadata Headers. :type metadata: dict + :param rbs_change_number: Last known timestamp of a rule based segment modification. + :type rbs_change_number: int + :return: Objects for fetch :rtype: dict, dict """ query = {'s': fetch_options.spec} if fetch_options.spec is not None else {} query['since'] = change_number + if rbs_change_number is not None: + query['rbSince'] = rbs_change_number extra_headers = metadata if fetch_options is None: return query, extra_headers diff --git a/splitio/api/splits.py b/splitio/api/splits.py index 692fde3b..f013497a 100644 --- a/splitio/api/splits.py +++ b/splitio/api/splits.py @@ -31,13 +31,16 @@ def __init__(self, client, sdk_key, sdk_metadata, telemetry_runtime_producer): self._telemetry_runtime_producer = telemetry_runtime_producer self._client.set_telemetry_data(HTTPExceptionsAndLatencies.SPLIT, self._telemetry_runtime_producer) - def fetch_splits(self, change_number, fetch_options): + def fetch_splits(self, change_number, rbs_change_number, fetch_options): """ Fetch feature flags from backend. :param change_number: Last known timestamp of a split modification. :type change_number: int + :param rbs_change_number: Last known timestamp of a rule based segment modification. + :type rbs_change_number: int + :param fetch_options: Fetch options for getting feature flag definitions. 
:type fetch_options: splitio.api.commons.FetchOptions @@ -45,7 +48,7 @@ def fetch_splits(self, change_number, fetch_options): :rtype: dict """ try: - query, extra_headers = build_fetch(change_number, fetch_options, self._metadata) + query, extra_headers = build_fetch(change_number, fetch_options, self._metadata, rbs_change_number) response = self._client.get( 'sdk', 'splitChanges', @@ -86,12 +89,15 @@ def __init__(self, client, sdk_key, sdk_metadata, telemetry_runtime_producer): self._telemetry_runtime_producer = telemetry_runtime_producer self._client.set_telemetry_data(HTTPExceptionsAndLatencies.SPLIT, self._telemetry_runtime_producer) - async def fetch_splits(self, change_number, fetch_options): + async def fetch_splits(self, change_number, rbs_change_number, fetch_options): """ Fetch feature flags from backend. :param change_number: Last known timestamp of a split modification. :type change_number: int + + :param rbs_change_number: Last known timestamp of a rule based segment modification. + :type rbs_change_number: int :param fetch_options: Fetch options for getting feature flag definitions. 
:type fetch_options: splitio.api.commons.FetchOptions @@ -100,7 +106,7 @@ async def fetch_splits(self, change_number, fetch_options): :rtype: dict """ try: - query, extra_headers = build_fetch(change_number, fetch_options, self._metadata) + query, extra_headers = build_fetch(change_number, fetch_options, self._metadata, rbs_change_number) response = await self._client.get( 'sdk', 'splitChanges', diff --git a/splitio/sync/split.py b/splitio/sync/split.py index 7bb13117..e24a21a0 100644 --- a/splitio/sync/split.py +++ b/splitio/sync/split.py @@ -10,10 +10,11 @@ from splitio.api import APIException, APIUriException from splitio.api.commons import FetchOptions from splitio.client.input_validator import validate_flag_sets -from splitio.models import splits +from splitio.models import splits, rule_based_segments from splitio.util.backoff import Backoff from splitio.util.time import get_current_epoch_time_ms -from splitio.util.storage_helper import update_feature_flag_storage, update_feature_flag_storage_async +from splitio.util.storage_helper import update_feature_flag_storage, update_feature_flag_storage_async, \ + update_rule_based_segment_storage, update_rule_based_segment_storage_async from splitio.sync import util from splitio.optional.loaders import asyncio, aiofiles @@ -32,7 +33,7 @@ class SplitSynchronizerBase(object): """Feature Flag changes synchronizer.""" - def __init__(self, feature_flag_api, feature_flag_storage): + def __init__(self, feature_flag_api, feature_flag_storage, rule_based_segment_storage): """ Class constructor. 
@@ -44,6 +45,7 @@ def __init__(self, feature_flag_api, feature_flag_storage): """ self._api = feature_flag_api self._feature_flag_storage = feature_flag_storage + self._rule_based_segment_storage = rule_based_segment_storage self._backoff = Backoff( _ON_DEMAND_FETCH_BACKOFF_BASE, _ON_DEMAND_FETCH_BACKOFF_MAX_WAIT) @@ -53,6 +55,11 @@ def feature_flag_storage(self): """Return Feature_flag storage object""" return self._feature_flag_storage + @property + def rule_based_segment_storage(self): + """Return rule base segment storage object""" + return self._rule_based_segment_storage + def _get_config_sets(self): """ Get all filter flag sets cnverrted to string, if no filter flagsets exist return None @@ -67,7 +74,7 @@ def _get_config_sets(self): class SplitSynchronizer(SplitSynchronizerBase): """Feature Flag changes synchronizer.""" - def __init__(self, feature_flag_api, feature_flag_storage): + def __init__(self, feature_flag_api, feature_flag_storage, rule_based_segment_storage): """ Class constructor. @@ -77,7 +84,7 @@ def __init__(self, feature_flag_api, feature_flag_storage): :param feature_flag_storage: Feature Flag Storage. 
:type feature_flag_storage: splitio.storage.InMemorySplitStorage """ - SplitSynchronizerBase.__init__(self, feature_flag_api, feature_flag_storage) + SplitSynchronizerBase.__init__(self, feature_flag_api, feature_flag_storage, rule_based_segment_storage) def _fetch_until(self, fetch_options, till=None): """ @@ -97,12 +104,17 @@ def _fetch_until(self, fetch_options, till=None): change_number = self._feature_flag_storage.get_change_number() if change_number is None: change_number = -1 - if till is not None and till < change_number: + + rbs_change_number = self._rule_based_segment_storage.get_change_number() + if rbs_change_number is None: + rbs_change_number = -1 + + if till is not None and till < change_number and till < rbs_change_number: # the passed till is less than change_number, no need to perform updates - return change_number, segment_list + return change_number, rbs_change_number, segment_list try: - feature_flag_changes = self._api.fetch_splits(change_number, fetch_options) + feature_flag_changes = self._api.fetch_splits(change_number, rbs_change_number, fetch_options) except APIException as exc: if exc._status_code is not None and exc._status_code == 414: _LOGGER.error('Exception caught: the amount of flag sets provided are big causing uri length error.') @@ -112,15 +124,16 @@ def _fetch_until(self, fetch_options, till=None): _LOGGER.error('Exception raised while fetching feature flags') _LOGGER.debug('Exception information: ', exc_info=True) raise exc - fetched_feature_flags = [(splits.from_raw(feature_flag)) for feature_flag in feature_flag_changes.get('splits', [])] - segment_list = update_feature_flag_storage(self._feature_flag_storage, fetched_feature_flags, feature_flag_changes['till']) - if feature_flag_changes['till'] == feature_flag_changes['since']: - return feature_flag_changes['till'], segment_list - - fetched_feature_flags = [(splits.from_raw(feature_flag)) for feature_flag in feature_flag_changes.get('splits', [])] - segment_list = 
update_feature_flag_storage(self._feature_flag_storage, fetched_feature_flags, feature_flag_changes['till']) - if feature_flag_changes['till'] == feature_flag_changes['since']: - return feature_flag_changes['till'], segment_list + + fetched_rule_based_segments = [(rule_based_segments.from_raw(rule_based_segment)) for rule_based_segment in feature_flag_changes.get('rbs').get('d', [])] + rbs_segment_list = update_rule_based_segment_storage(self._rule_based_segment_storage, fetched_rule_based_segments, feature_flag_changes.get('rbs')['t']) + + fetched_feature_flags = [(splits.from_raw(feature_flag)) for feature_flag in feature_flag_changes.get('ff').get('d', [])] + segment_list = update_feature_flag_storage(self._feature_flag_storage, fetched_feature_flags, feature_flag_changes.get('ff')['t']) + segment_list.update(rbs_segment_list) + + if feature_flag_changes.get('ff')['t'] == feature_flag_changes.get('ff')['s'] and feature_flag_changes.get('rbs')['t'] == feature_flag_changes.get('rbs')['s']: + return feature_flag_changes.get('ff')['t'], feature_flag_changes.get('rbs')['t'], segment_list def _attempt_feature_flag_sync(self, fetch_options, till=None): """ @@ -140,13 +153,13 @@ def _attempt_feature_flag_sync(self, fetch_options, till=None): remaining_attempts = _ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES while True: remaining_attempts -= 1 - change_number, segment_list = self._fetch_until(fetch_options, till) + change_number, rbs_change_number, segment_list = self._fetch_until(fetch_options, till) final_segment_list.update(segment_list) - if till is None or till <= change_number: - return True, remaining_attempts, change_number, final_segment_list + if till is None or (till <= change_number and till <= rbs_change_number): + return True, remaining_attempts, change_number, rbs_change_number, final_segment_list elif remaining_attempts <= 0: - return False, remaining_attempts, change_number, final_segment_list + return False, remaining_attempts, change_number, rbs_change_number, 
final_segment_list how_long = self._backoff.get() time.sleep(how_long) @@ -172,7 +185,7 @@ def synchronize_splits(self, till=None): """ final_segment_list = set() fetch_options = FetchOptions(True, sets=self._get_config_sets()) # Set Cache-Control to no-cache - successful_sync, remaining_attempts, change_number, segment_list = self._attempt_feature_flag_sync(fetch_options, + successful_sync, remaining_attempts, change_number, rbs_change_number, segment_list = self._attempt_feature_flag_sync(fetch_options, till) final_segment_list.update(segment_list) attempts = _ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES - remaining_attempts @@ -180,8 +193,8 @@ def synchronize_splits(self, till=None): _LOGGER.debug('Refresh completed in %d attempts.', attempts) return final_segment_list - with_cdn_bypass = FetchOptions(True, change_number, sets=self._get_config_sets()) # Set flag for bypassing CDN - without_cdn_successful_sync, remaining_attempts, change_number, segment_list = self._attempt_feature_flag_sync(with_cdn_bypass, till) + with_cdn_bypass = FetchOptions(True, change_number, rbs_change_number, sets=self._get_config_sets()) # Set flag for bypassing CDN + without_cdn_successful_sync, remaining_attempts, change_number, rbs_change_number, segment_list = self._attempt_feature_flag_sync(with_cdn_bypass, till) final_segment_list.update(segment_list) without_cdn_attempts = _ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES - remaining_attempts if without_cdn_successful_sync: @@ -208,7 +221,7 @@ def kill_split(self, feature_flag_name, default_treatment, change_number): class SplitSynchronizerAsync(SplitSynchronizerBase): """Feature Flag changes synchronizer async.""" - def __init__(self, feature_flag_api, feature_flag_storage): + def __init__(self, feature_flag_api, feature_flag_storage, rule_based_segment_storage): """ Class constructor. @@ -218,7 +231,7 @@ def __init__(self, feature_flag_api, feature_flag_storage): :param feature_flag_storage: Feature Flag Storage. 
:type feature_flag_storage: splitio.storage.InMemorySplitStorage """ - SplitSynchronizerBase.__init__(self, feature_flag_api, feature_flag_storage) + SplitSynchronizerBase.__init__(self, feature_flag_api, feature_flag_storage, rule_based_segment_storage) async def _fetch_until(self, fetch_options, till=None): """ @@ -238,12 +251,17 @@ async def _fetch_until(self, fetch_options, till=None): change_number = await self._feature_flag_storage.get_change_number() if change_number is None: change_number = -1 - if till is not None and till < change_number: + + rbs_change_number = await self._rule_based_segment_storage.get_change_number() + if rbs_change_number is None: + rbs_change_number = -1 + + if till is not None and till < change_number and till < rbs_change_number: # the passed till is less than change_number, no need to perform updates - return change_number, segment_list + return change_number, rbs_change_number, segment_list try: - feature_flag_changes = await self._api.fetch_splits(change_number, fetch_options) + feature_flag_changes = await self._api.fetch_splits(change_number, rbs_change_number, fetch_options) except APIException as exc: if exc._status_code is not None and exc._status_code == 414: _LOGGER.error('Exception caught: the amount of flag sets provided are big causing uri length error.') @@ -254,10 +272,15 @@ async def _fetch_until(self, fetch_options, till=None): _LOGGER.debug('Exception information: ', exc_info=True) raise exc - fetched_feature_flags = [(splits.from_raw(feature_flag)) for feature_flag in feature_flag_changes.get('splits', [])] - segment_list = await update_feature_flag_storage_async(self._feature_flag_storage, fetched_feature_flags, feature_flag_changes['till']) - if feature_flag_changes['till'] == feature_flag_changes['since']: - return feature_flag_changes['till'], segment_list + fetched_rule_based_segments = [(rule_based_segments.from_raw(rule_based_segment)) for rule_based_segment in feature_flag_changes.get('rbs').get('d', [])] 
+ rbs_segment_list = await update_rule_based_segment_storage_async(self._rule_based_segment_storage, fetched_rule_based_segments, feature_flag_changes.get('rbs')['t']) + + fetched_feature_flags = [(splits.from_raw(feature_flag)) for feature_flag in feature_flag_changes.get('ff').get('d', [])] + segment_list = await update_feature_flag_storage_async(self._feature_flag_storage, fetched_feature_flags, feature_flag_changes.get('ff')['t']) + segment_list.update(rbs_segment_list) + + if feature_flag_changes.get('ff')['t'] == feature_flag_changes.get('ff')['s'] and feature_flag_changes.get('rbs')['t'] == feature_flag_changes.get('rbs')['s']: + return feature_flag_changes.get('ff')['t'], feature_flag_changes.get('rbs')['t'], segment_list async def _attempt_feature_flag_sync(self, fetch_options, till=None): """ @@ -277,13 +300,13 @@ async def _attempt_feature_flag_sync(self, fetch_options, till=None): remaining_attempts = _ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES while True: remaining_attempts -= 1 - change_number, segment_list = await self._fetch_until(fetch_options, till) + change_number, rbs_change_number, segment_list = await self._fetch_until(fetch_options, till) final_segment_list.update(segment_list) - if till is None or till <= change_number: - return True, remaining_attempts, change_number, final_segment_list + if till is None or (till <= change_number and till <= rbs_change_number): + return True, remaining_attempts, change_number, rbs_change_number, final_segment_list elif remaining_attempts <= 0: - return False, remaining_attempts, change_number, final_segment_list + return False, remaining_attempts, change_number, rbs_change_number, final_segment_list how_long = self._backoff.get() await asyncio.sleep(how_long) @@ -297,7 +320,7 @@ async def synchronize_splits(self, till=None): """ final_segment_list = set() fetch_options = FetchOptions(True, sets=self._get_config_sets()) # Set Cache-Control to no-cache - successful_sync, remaining_attempts, change_number, 
segment_list = await self._attempt_feature_flag_sync(fetch_options, + successful_sync, remaining_attempts, change_number, rbs_change_number, segment_list = await self._attempt_feature_flag_sync(fetch_options, till) final_segment_list.update(segment_list) attempts = _ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES - remaining_attempts @@ -305,8 +328,8 @@ async def synchronize_splits(self, till=None): _LOGGER.debug('Refresh completed in %d attempts.', attempts) return final_segment_list - with_cdn_bypass = FetchOptions(True, change_number, sets=self._get_config_sets()) # Set flag for bypassing CDN - without_cdn_successful_sync, remaining_attempts, change_number, segment_list = await self._attempt_feature_flag_sync(with_cdn_bypass, till) + with_cdn_bypass = FetchOptions(True, change_number, rbs_change_number, sets=self._get_config_sets()) # Set flag for bypassing CDN + without_cdn_successful_sync, remaining_attempts, change_number, rbs_change_number, segment_list = await self._attempt_feature_flag_sync(with_cdn_bypass, till) final_segment_list.update(segment_list) without_cdn_attempts = _ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES - remaining_attempts if without_cdn_successful_sync: diff --git a/tests/api/test_segments_api.py b/tests/api/test_segments_api.py index 73e3efe7..8681be59 100644 --- a/tests/api/test_segments_api.py +++ b/tests/api/test_segments_api.py @@ -16,7 +16,7 @@ def test_fetch_segment_changes(self, mocker): httpclient.get.return_value = client.HttpResponse(200, '{"prop1": "value1"}', {}) segment_api = segments.SegmentsAPI(httpclient, 'some_api_key', SdkMetadata('1.0', 'some', '1.2.3.4'), mocker.Mock()) - response = segment_api.fetch_segment('some_segment', 123, FetchOptions(None, None, None, None)) + response = segment_api.fetch_segment('some_segment', 123, FetchOptions(None, None, None, None, None)) assert response['prop1'] == 'value1' assert httpclient.get.mock_calls == [mocker.call('sdk', 'segmentChanges/some_segment', 'some_api_key', extra_headers={ @@ -27,7 +27,7 
@@ def test_fetch_segment_changes(self, mocker): query={'since': 123})] httpclient.reset_mock() - response = segment_api.fetch_segment('some_segment', 123, FetchOptions(True, None, None, None)) + response = segment_api.fetch_segment('some_segment', 123, FetchOptions(True, None, None, None, None)) assert response['prop1'] == 'value1' assert httpclient.get.mock_calls == [mocker.call('sdk', 'segmentChanges/some_segment', 'some_api_key', extra_headers={ @@ -39,7 +39,7 @@ def test_fetch_segment_changes(self, mocker): query={'since': 123})] httpclient.reset_mock() - response = segment_api.fetch_segment('some_segment', 123, FetchOptions(True, 123, None, None)) + response = segment_api.fetch_segment('some_segment', 123, FetchOptions(True, 123, None, None, None)) assert response['prop1'] == 'value1' assert httpclient.get.mock_calls == [mocker.call('sdk', 'segmentChanges/some_segment', 'some_api_key', extra_headers={ @@ -83,7 +83,7 @@ async def get(verb, url, key, query, extra_headers): return client.HttpResponse(200, '{"prop1": "value1"}', {}) httpclient.get = get - response = await segment_api.fetch_segment('some_segment', 123, FetchOptions(None, None, None, None)) + response = await segment_api.fetch_segment('some_segment', 123, FetchOptions(None, None, None, None, None)) assert response['prop1'] == 'value1' assert self.verb == 'sdk' assert self.url == 'segmentChanges/some_segment' @@ -96,7 +96,7 @@ async def get(verb, url, key, query, extra_headers): assert self.query == {'since': 123} httpclient.reset_mock() - response = await segment_api.fetch_segment('some_segment', 123, FetchOptions(True, None, None, None)) + response = await segment_api.fetch_segment('some_segment', 123, FetchOptions(True, None, None, None, None)) assert response['prop1'] == 'value1' assert self.verb == 'sdk' assert self.url == 'segmentChanges/some_segment' @@ -110,7 +110,7 @@ async def get(verb, url, key, query, extra_headers): assert self.query == {'since': 123} httpclient.reset_mock() - response 
= await segment_api.fetch_segment('some_segment', 123, FetchOptions(True, 123, None, None)) + response = await segment_api.fetch_segment('some_segment', 123, FetchOptions(True, 123, None, None, None)) assert response['prop1'] == 'value1' assert self.verb == 'sdk' assert self.url == 'segmentChanges/some_segment' @@ -128,6 +128,6 @@ def raise_exception(*args, **kwargs): raise client.HttpClientException('some_message') httpclient.get = raise_exception with pytest.raises(APIException) as exc_info: - response = await segment_api.fetch_segment('some_segment', 123, FetchOptions(None, None, None, None)) + response = await segment_api.fetch_segment('some_segment', 123, FetchOptions(None, None, None, None, None)) assert exc_info.type == APIException assert exc_info.value.message == 'some_message' diff --git a/tests/api/test_splits_api.py b/tests/api/test_splits_api.py index d1d276b7..1826ec23 100644 --- a/tests/api/test_splits_api.py +++ b/tests/api/test_splits_api.py @@ -16,7 +16,7 @@ def test_fetch_split_changes(self, mocker): httpclient.get.return_value = client.HttpResponse(200, '{"prop1": "value1"}', {}) split_api = splits.SplitsAPI(httpclient, 'some_api_key', SdkMetadata('1.0', 'some', '1.2.3.4'), mocker.Mock()) - response = split_api.fetch_splits(123, FetchOptions(False, None, 'set1,set2')) + response = split_api.fetch_splits(123, -1, FetchOptions(False, None, None, 'set1,set2')) assert response['prop1'] == 'value1' assert httpclient.get.mock_calls == [mocker.call('sdk', 'splitChanges', 'some_api_key', extra_headers={ @@ -24,10 +24,10 @@ def test_fetch_split_changes(self, mocker): 'SplitSDKMachineIP': '1.2.3.4', 'SplitSDKMachineName': 'some' }, - query={'s': '1.1', 'since': 123, 'sets': 'set1,set2'})] + query={'s': '1.1', 'since': 123, 'rbSince': -1, 'sets': 'set1,set2'})] httpclient.reset_mock() - response = split_api.fetch_splits(123, FetchOptions(True, 123, 'set3')) + response = split_api.fetch_splits(123, 1, FetchOptions(True, 123, None,'set3')) assert 
response['prop1'] == 'value1' assert httpclient.get.mock_calls == [mocker.call('sdk', 'splitChanges', 'some_api_key', extra_headers={ @@ -36,10 +36,10 @@ def test_fetch_split_changes(self, mocker): 'SplitSDKMachineName': 'some', 'Cache-Control': 'no-cache' }, - query={'s': '1.1', 'since': 123, 'till': 123, 'sets': 'set3'})] + query={'s': '1.1', 'since': 123, 'rbSince': 1, 'till': 123, 'sets': 'set3'})] httpclient.reset_mock() - response = split_api.fetch_splits(123, FetchOptions(True, 123, 'set3')) + response = split_api.fetch_splits(123, 122, FetchOptions(True, 123, None, 'set3')) assert response['prop1'] == 'value1' assert httpclient.get.mock_calls == [mocker.call('sdk', 'splitChanges', 'some_api_key', extra_headers={ @@ -48,14 +48,14 @@ def test_fetch_split_changes(self, mocker): 'SplitSDKMachineName': 'some', 'Cache-Control': 'no-cache' }, - query={'s': '1.1', 'since': 123, 'till': 123, 'sets': 'set3'})] + query={'s': '1.1', 'since': 123, 'rbSince': 122, 'till': 123, 'sets': 'set3'})] httpclient.reset_mock() def raise_exception(*args, **kwargs): raise client.HttpClientException('some_message') httpclient.get.side_effect = raise_exception with pytest.raises(APIException) as exc_info: - response = split_api.fetch_splits(123, FetchOptions()) + response = split_api.fetch_splits(123, 12, FetchOptions()) assert exc_info.type == APIException assert exc_info.value.message == 'some_message' @@ -82,7 +82,7 @@ async def get(verb, url, key, query, extra_headers): return client.HttpResponse(200, '{"prop1": "value1"}', {}) httpclient.get = get - response = await split_api.fetch_splits(123, FetchOptions(False, None, 'set1,set2')) + response = await split_api.fetch_splits(123, -1, FetchOptions(False, None, None, 'set1,set2')) assert response['prop1'] == 'value1' assert self.verb == 'sdk' assert self.url == 'splitChanges' @@ -92,10 +92,10 @@ async def get(verb, url, key, query, extra_headers): 'SplitSDKMachineIP': '1.2.3.4', 'SplitSDKMachineName': 'some' } - assert self.query 
== {'s': '1.1', 'since': 123, 'sets': 'set1,set2'} + assert self.query == {'s': '1.1', 'since': 123, 'rbSince': -1, 'sets': 'set1,set2'} httpclient.reset_mock() - response = await split_api.fetch_splits(123, FetchOptions(True, 123, 'set3')) + response = await split_api.fetch_splits(123, 1, FetchOptions(True, 123, None, 'set3')) assert response['prop1'] == 'value1' assert self.verb == 'sdk' assert self.url == 'splitChanges' @@ -106,10 +106,10 @@ async def get(verb, url, key, query, extra_headers): 'SplitSDKMachineName': 'some', 'Cache-Control': 'no-cache' } - assert self.query == {'s': '1.1', 'since': 123, 'till': 123, 'sets': 'set3'} + assert self.query == {'s': '1.1', 'since': 123, 'rbSince': 1, 'till': 123, 'sets': 'set3'} httpclient.reset_mock() - response = await split_api.fetch_splits(123, FetchOptions(True, 123)) + response = await split_api.fetch_splits(123, 122, FetchOptions(True, 123, None)) assert response['prop1'] == 'value1' assert self.verb == 'sdk' assert self.url == 'splitChanges' @@ -120,13 +120,13 @@ async def get(verb, url, key, query, extra_headers): 'SplitSDKMachineName': 'some', 'Cache-Control': 'no-cache' } - assert self.query == {'s': '1.1', 'since': 123, 'till': 123} + assert self.query == {'s': '1.1', 'since': 123, 'rbSince': 122, 'till': 123} httpclient.reset_mock() def raise_exception(*args, **kwargs): raise client.HttpClientException('some_message') httpclient.get = raise_exception with pytest.raises(APIException) as exc_info: - response = await split_api.fetch_splits(123, FetchOptions()) + response = await split_api.fetch_splits(123, 12, FetchOptions()) assert exc_info.type == APIException assert exc_info.value.message == 'some_message' diff --git a/tests/sync/test_splits_synchronizer.py b/tests/sync/test_splits_synchronizer.py index b5aafd51..470c2241 100644 --- a/tests/sync/test_splits_synchronizer.py +++ b/tests/sync/test_splits_synchronizer.py @@ -9,9 +9,10 @@ from splitio.api import APIException from splitio.api.commons import 
FetchOptions from splitio.storage import SplitStorage -from splitio.storage.inmemmory import InMemorySplitStorage, InMemorySplitStorageAsync +from splitio.storage.inmemmory import InMemorySplitStorage, InMemorySplitStorageAsync, InMemoryRuleBasedSegmentStorage, InMemoryRuleBasedSegmentStorageAsync from splitio.storage import FlagSetsFilter from splitio.models.splits import Split +from splitio.models.rule_based_segments import RuleBasedSegment from splitio.sync.split import SplitSynchronizer, SplitSynchronizerAsync, LocalSplitSynchronizer, LocalSplitSynchronizerAsync, LocalhostMode from splitio.optional.loaders import aiofiles, asyncio from tests.integration import splits_json @@ -52,42 +53,112 @@ 'sets': ['set1', 'set2'] }] -json_body = {'splits': [{ - 'changeNumber': 123, - 'trafficTypeName': 'user', - 'name': 'some_name', - 'trafficAllocation': 100, - 'trafficAllocationSeed': 123456, - 'seed': 321654, - 'status': 'ACTIVE', - 'killed': False, - 'defaultTreatment': 'off', - 'algo': 2, - 'conditions': [ - { - 'partitions': [ - {'treatment': 'on', 'size': 50}, - {'treatment': 'off', 'size': 50} - ], - 'contitionType': 'WHITELIST', - 'label': 'some_label', - 'matcherGroup': { - 'matchers': [ - { - 'matcherType': 'WHITELIST', - 'whitelistMatcherData': { - 'whitelist': ['k1', 'k2', 'k3'] - }, - 'negate': False, - } +json_body = { + "ff": { + "t":1675095324253, + "s":-1, + 'd': [{ + 'changeNumber': 123, + 'trafficTypeName': 'user', + 'name': 'some_name', + 'trafficAllocation': 100, + 'trafficAllocationSeed': 123456, + 'seed': 321654, + 'status': 'ACTIVE', + 'killed': False, + 'defaultTreatment': 'off', + 'algo': 2, + 'conditions': [ + { + 'partitions': [ + {'treatment': 'on', 'size': 50}, + {'treatment': 'off', 'size': 50} ], - 'combiner': 'AND' + 'contitionType': 'WHITELIST', + 'label': 'some_label', + 'matcherGroup': { + 'matchers': [ + { + 'matcherType': 'WHITELIST', + 'whitelistMatcherData': { + 'whitelist': ['k1', 'k2', 'k3'] + }, + 'negate': False, + } + ], + 
'combiner': 'AND' + } + }, + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user" + }, + "matcherType": "IN_RULE_BASED_SEGMENT", + "negate": False, + "userDefinedSegmentMatcherData": { + "segmentName": "sample_rule_based_segment" + } + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + }, + { + "treatment": "off", + "size": 0 + } + ], + "label": "in rule based segment sample_rule_based_segment" + }, + ], + 'sets': ['set1', 'set2']}] + }, + "rbs": { + "t": 1675095324253, + "s": -1, + "d": [ + { + "changeNumber": 5, + "name": "sample_rule_based_segment", + "status": "ACTIVE", + "trafficTypeName": "user", + "excluded":{ + "keys":["mauro@split.io","gaston@split.io"], + "segments":[] + }, + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": "email" + }, + "matcherType": "ENDS_WITH", + "negate": False, + "whitelistMatcherData": { + "whitelist": [ + "@split.io" + ] + } + } + ] } - } - ], - 'sets': ['set1', 'set2']}], - "till":1675095324253, - "since":-1, + } + ] + } + ] + } } class SplitsSynchronizerTests(object): @@ -98,13 +169,16 @@ class SplitsSynchronizerTests(object): def test_synchronize_splits_error(self, mocker): """Test that if fetching splits fails at some_point, the task will continue running.""" storage = mocker.Mock(spec=InMemorySplitStorage) + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) api = mocker.Mock() - def run(x, c): + def run(x, y, c): raise APIException("something broke") run._calls = 0 api.fetch_splits.side_effect = run storage.get_change_number.return_value = -1 + rbs_storage.get_change_number.return_value = -1 + class flag_set_filter(): def should_filter(): return False @@ -115,7 +189,7 @@ def intersect(sets): storage.flag_set_filter.flag_sets = {} storage.flag_set_filter.sorted_flag_sets = [] - split_synchronizer = SplitSynchronizer(api, 
storage) + split_synchronizer = SplitSynchronizer(api, storage, rbs_storage) with pytest.raises(APIException): split_synchronizer.synchronize_splits(1) @@ -123,21 +197,32 @@ def intersect(sets): def test_synchronize_splits(self, mocker): """Test split sync.""" storage = mocker.Mock(spec=InMemorySplitStorage) + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) def change_number_mock(): change_number_mock._calls += 1 if change_number_mock._calls == 1: return -1 return 123 + + def rbs_change_number_mock(): + rbs_change_number_mock._calls += 1 + if rbs_change_number_mock._calls == 1: + return -1 + return 123 + change_number_mock._calls = 0 + rbs_change_number_mock._calls = 0 storage.get_change_number.side_effect = change_number_mock - + rbs_storage.get_change_number.side_effect = rbs_change_number_mock + class flag_set_filter(): def should_filter(): return False def intersect(sets): return True + storage.flag_set_filter = flag_set_filter storage.flag_set_filter.flag_sets = {} storage.flag_set_filter.sorted_flag_sets = [] @@ -147,35 +232,46 @@ def get_changes(*args, **kwargs): get_changes.called += 1 if get_changes.called == 1: - return { - 'splits': self.splits, - 'since': -1, - 'till': 123 - } + return json_body else: return { - 'splits': [], - 'since': 123, - 'till': 123 + "ff": { + "t":123, + "s":123, + 'd': [] + }, + "rbs": { + "t": 5, + "s": 5, + "d": [] + } } + get_changes.called = 0 api.fetch_splits.side_effect = get_changes - split_synchronizer = SplitSynchronizer(api, storage) + split_synchronizer = SplitSynchronizer(api, storage, rbs_storage) split_synchronizer.synchronize_splits() - + assert api.fetch_splits.mock_calls[0][1][0] == -1 - assert api.fetch_splits.mock_calls[0][1][1].cache_control_headers == True + assert api.fetch_splits.mock_calls[0][1][2].cache_control_headers == True assert api.fetch_splits.mock_calls[1][1][0] == 123 - assert api.fetch_splits.mock_calls[1][1][1].cache_control_headers == True + assert 
api.fetch_splits.mock_calls[1][1][1] == 123 + assert api.fetch_splits.mock_calls[1][1][2].cache_control_headers == True inserted_split = storage.update.mock_calls[0][1][0][0] assert isinstance(inserted_split, Split) assert inserted_split.name == 'some_name' + inserted_rbs = rbs_storage.update.mock_calls[0][1][0][0] + assert isinstance(inserted_rbs, RuleBasedSegment) + assert inserted_rbs.name == 'sample_rule_based_segment' + def test_not_called_on_till(self, mocker): """Test that sync is not called when till is less than previous changenumber""" storage = mocker.Mock(spec=InMemorySplitStorage) + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) + class flag_set_filter(): def should_filter(): return False @@ -189,6 +285,7 @@ def intersect(sets): def change_number_mock(): return 2 storage.get_change_number.side_effect = change_number_mock + rbs_storage.get_change_number.side_effect = change_number_mock def get_changes(*args, **kwargs): get_changes.called += 1 @@ -199,7 +296,7 @@ def get_changes(*args, **kwargs): api = mocker.Mock() api.fetch_splits.side_effect = get_changes - split_synchronizer = SplitSynchronizer(api, storage) + split_synchronizer = SplitSynchronizer(api, storage, rbs_storage) split_synchronizer.synchronize_splits(1) assert get_changes.called == 0 @@ -209,6 +306,7 @@ def test_synchronize_splits_cdn(self, mocker): mocker.patch('splitio.sync.split._ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES', new=3) storage = mocker.Mock(spec=InMemorySplitStorage) + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) def change_number_mock(): change_number_mock._calls += 1 @@ -219,24 +317,39 @@ def change_number_mock(): elif change_number_mock._calls <= 7: return 1234 return 12345 # Return proper cn for CDN Bypass + + def rbs_change_number_mock(): + rbs_change_number_mock._calls += 1 + if rbs_change_number_mock._calls == 1: + return -1 + return 12345 # Return proper cn for CDN Bypass + change_number_mock._calls = 0 + rbs_change_number_mock._calls = 
0 storage.get_change_number.side_effect = change_number_mock + rbs_storage.get_change_number.side_effect = rbs_change_number_mock api = mocker.Mock() def get_changes(*args, **kwargs): get_changes.called += 1 if get_changes.called == 1: - return { 'splits': self.splits, 'since': -1, 'till': 123 } + return { 'ff': { 'd': self.splits, 's': -1, 't': 123 }, + 'rbs': {"t": 123, "s": -1, "d": []}} elif get_changes.called == 2: - return { 'splits': [], 'since': 123, 'till': 123 } + return { 'ff': { 'd': [], 's': 123, 't': 123 }, + 'rbs': {"t": 123, "s": 123, "d": []}} elif get_changes.called == 3: - return { 'splits': [], 'since': 123, 'till': 1234 } + return { 'ff': { 'd': [], 's': 123, 't': 1234 }, + 'rbs': {"t": 123, "s": 123, "d": []}} elif get_changes.called >= 4 and get_changes.called <= 6: - return { 'splits': [], 'since': 1234, 'till': 1234 } + return { 'ff': { 'd': [], 's': 1234, 't': 1234 }, + 'rbs': {"t": 123, "s": 123, "d": []}} elif get_changes.called == 7: - return { 'splits': [], 'since': 1234, 'till': 12345 } - return { 'splits': [], 'since': 12345, 'till': 12345 } + return { 'ff': { 'd': [], 's': 1234, 't': 12345 }, + 'rbs': {"t": 123, "s": 123, "d": []}} + return { 'ff': { 'd': [], 's': 12345, 't': 12345 }, + 'rbs': {"t": 123, "s": 123, "d": []}} get_changes.called = 0 api.fetch_splits.side_effect = get_changes @@ -251,20 +364,20 @@ def intersect(sets): storage.flag_set_filter.flag_sets = {} storage.flag_set_filter.sorted_flag_sets = [] - split_synchronizer = SplitSynchronizer(api, storage) + split_synchronizer = SplitSynchronizer(api, storage, rbs_storage) split_synchronizer._backoff = Backoff(1, 1) split_synchronizer.synchronize_splits() assert api.fetch_splits.mock_calls[0][1][0] == -1 - assert api.fetch_splits.mock_calls[0][1][1].cache_control_headers == True + assert api.fetch_splits.mock_calls[0][1][2].cache_control_headers == True assert api.fetch_splits.mock_calls[1][1][0] == 123 - assert api.fetch_splits.mock_calls[1][1][1].cache_control_headers 
== True + assert api.fetch_splits.mock_calls[1][1][2].cache_control_headers == True split_synchronizer._backoff = Backoff(1, 0.1) split_synchronizer.synchronize_splits(12345) assert api.fetch_splits.mock_calls[3][1][0] == 1234 - assert api.fetch_splits.mock_calls[3][1][1].cache_control_headers == True - assert len(api.fetch_splits.mock_calls) == 8 # 2 ok + BACKOFF(2 since==till + 2 re-attempts) + CDN(2 since==till) + assert api.fetch_splits.mock_calls[3][1][2].cache_control_headers == True + assert len(api.fetch_splits.mock_calls) == 10 # 2 ok + BACKOFF(2 since==till + 2 re-attempts) + CDN(2 since==till) inserted_split = storage.update.mock_calls[0][1][0][0] assert isinstance(inserted_split, Split) @@ -273,31 +386,36 @@ def intersect(sets): def test_sync_flag_sets_with_config_sets(self, mocker): """Test split sync with flag sets.""" storage = InMemorySplitStorage(['set1', 'set2']) - - split = self.splits[0].copy() + rbs_storage = InMemoryRuleBasedSegmentStorage() + + split = copy.deepcopy(self.splits[0]) split['name'] = 'second' splits1 = [self.splits[0].copy(), split] - splits2 = self.splits.copy() - splits3 = self.splits.copy() - splits4 = self.splits.copy() + splits2 = copy.deepcopy(self.splits) + splits3 = copy.deepcopy(self.splits) + splits4 = copy.deepcopy(self.splits) api = mocker.Mock() def get_changes(*args, **kwargs): get_changes.called += 1 if get_changes.called == 1: - return { 'splits': splits1, 'since': 123, 'till': 123 } + return { 'ff': { 'd': splits1, 's': 123, 't': 123 }, + 'rbs': {'t': 123, 's': 123, 'd': []}} elif get_changes.called == 2: splits2[0]['sets'] = ['set3'] - return { 'splits': splits2, 'since': 124, 'till': 124 } + return { 'ff': { 'd': splits2, 's': 124, 't': 124 }, + 'rbs': {'t': 124, 's': 124, 'd': []}} elif get_changes.called == 3: splits3[0]['sets'] = ['set1'] - return { 'splits': splits3, 'since': 12434, 'till': 12434 } + return { 'ff': { 'd': splits3, 's': 12434, 't': 12434 }, + 'rbs': {'t': 12434, 's': 12434, 'd': []}} 
splits4[0]['sets'] = ['set6'] splits4[0]['name'] = 'new_split' - return { 'splits': splits4, 'since': 12438, 'till': 12438 } + return { 'ff': { 'd': splits4, 's': 12438, 't': 12438 }, + 'rbs': {'t': 12438, 's': 12438, 'd': []}} get_changes.called = 0 api.fetch_splits.side_effect = get_changes - split_synchronizer = SplitSynchronizer(api, storage) + split_synchronizer = SplitSynchronizer(api, storage, rbs_storage) split_synchronizer._backoff = Backoff(1, 1) split_synchronizer.synchronize_splits() assert isinstance(storage.get('some_name'), Split) @@ -314,40 +432,44 @@ def get_changes(*args, **kwargs): def test_sync_flag_sets_without_config_sets(self, mocker): """Test split sync with flag sets.""" storage = InMemorySplitStorage() - - split = self.splits[0].copy() + rbs_storage = InMemoryRuleBasedSegmentStorage() + split = copy.deepcopy(self.splits[0]) split['name'] = 'second' splits1 = [self.splits[0].copy(), split] - splits2 = self.splits.copy() - splits3 = self.splits.copy() - splits4 = self.splits.copy() + splits2 = copy.deepcopy(self.splits) + splits3 = copy.deepcopy(self.splits) + splits4 = copy.deepcopy(self.splits) api = mocker.Mock() def get_changes(*args, **kwargs): get_changes.called += 1 if get_changes.called == 1: - return { 'splits': splits1, 'since': 123, 'till': 123 } + return { 'ff': { 'd': splits1, 's': 123, 't': 123 }, + 'rbs': {"t": 123, "s": 123, "d": []}} elif get_changes.called == 2: splits2[0]['sets'] = ['set3'] - return { 'splits': splits2, 'since': 124, 'till': 124 } + return { 'ff': { 'd': splits2, 's': 124, 't': 124 }, + 'rbs': {"t": 124, "s": 124, "d": []}} elif get_changes.called == 3: splits3[0]['sets'] = ['set1'] - return { 'splits': splits3, 'since': 12434, 'till': 12434 } + return { 'ff': { 'd': splits3, 's': 12434, 't': 12434 }, + 'rbs': {"t": 12434, "s": 12434, "d": []}} splits4[0]['sets'] = ['set6'] splits4[0]['name'] = 'third_split' - return { 'splits': splits4, 'since': 12438, 'till': 12438 } + return { 'ff': { 'd': splits4, 's': 
12438, 't': 12438 }, + 'rbs': {"t": 12438, "s": 12438, "d": []}} get_changes.called = 0 api.fetch_splits.side_effect = get_changes - split_synchronizer = SplitSynchronizer(api, storage) + split_synchronizer = SplitSynchronizer(api, storage, rbs_storage) split_synchronizer._backoff = Backoff(1, 1) split_synchronizer.synchronize_splits() - assert isinstance(storage.get('new_split'), Split) + assert isinstance(storage.get('some_name'), Split) split_synchronizer.synchronize_splits(124) - assert isinstance(storage.get('new_split'), Split) + assert isinstance(storage.get('some_name'), Split) split_synchronizer.synchronize_splits(12434) - assert isinstance(storage.get('new_split'), Split) + assert isinstance(storage.get('some_name'), Split) split_synchronizer.synchronize_splits(12438) assert isinstance(storage.get('third_split'), Split) @@ -361,17 +483,19 @@ class SplitsSynchronizerAsyncTests(object): async def test_synchronize_splits_error(self, mocker): """Test that if fetching splits fails at some_point, the task will continue running.""" storage = mocker.Mock(spec=InMemorySplitStorageAsync) + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) api = mocker.Mock() - async def run(x, c): + async def run(x, y, c): raise APIException("something broke") run._calls = 0 api.fetch_splits = run async def get_change_number(*args): return -1 - storage.get_change_number = get_change_number - + storage.get_change_number = get_change_number + rbs_storage.get_change_number = get_change_number + class flag_set_filter(): def should_filter(): return False @@ -382,7 +506,7 @@ def intersect(sets): storage.flag_set_filter.flag_sets = {} storage.flag_set_filter.sorted_flag_sets = [] - split_synchronizer = SplitSynchronizerAsync(api, storage) + split_synchronizer = SplitSynchronizerAsync(api, storage, rbs_storage) with pytest.raises(APIException): await split_synchronizer.synchronize_splits(1) @@ -391,15 +515,24 @@ def intersect(sets): async def test_synchronize_splits(self, 
mocker): """Test split sync.""" storage = mocker.Mock(spec=InMemorySplitStorageAsync) - + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) + async def change_number_mock(): change_number_mock._calls += 1 if change_number_mock._calls == 1: return -1 return 123 + async def rbs_change_number_mock(): + rbs_change_number_mock._calls += 1 + if rbs_change_number_mock._calls == 1: + return -1 + return 123 + change_number_mock._calls = 0 + rbs_change_number_mock._calls = 0 storage.get_change_number = change_number_mock - + rbs_storage.get_change_number.side_effect = rbs_change_number_mock + class flag_set_filter(): def should_filter(): return False @@ -416,33 +549,42 @@ async def update(parsed_split, deleted, chanhe_number): self.parsed_split = parsed_split storage.update = update + self.parsed_rbs = None + async def update(parsed_rbs, deleted, chanhe_number): + if len(parsed_rbs) > 0: + self.parsed_rbs = parsed_rbs + rbs_storage.update = update + api = mocker.Mock() self.change_number_1 = None self.fetch_options_1 = None self.change_number_2 = None self.fetch_options_2 = None - async def get_changes(change_number, fetch_options): + async def get_changes(change_number, rbs_change_number, fetch_options): get_changes.called += 1 if get_changes.called == 1: self.change_number_1 = change_number self.fetch_options_1 = fetch_options - return { - 'splits': self.splits, - 'since': -1, - 'till': 123 - } + return json_body else: self.change_number_2 = change_number self.fetch_options_2 = fetch_options return { - 'splits': [], - 'since': 123, - 'till': 123 + "ff": { + "t":123, + "s":123, + 'd': [] + }, + "rbs": { + "t": 123, + "s": 123, + "d": [] + } } get_changes.called = 0 api.fetch_splits = get_changes - split_synchronizer = SplitSynchronizerAsync(api, storage) + split_synchronizer = SplitSynchronizerAsync(api, storage, rbs_storage) await split_synchronizer.synchronize_splits() assert (-1, FetchOptions(True)._cache_control_headers) == (self.change_number_1, 
self.fetch_options_1._cache_control_headers) @@ -451,10 +593,17 @@ async def get_changes(change_number, fetch_options): assert isinstance(inserted_split, Split) assert inserted_split.name == 'some_name' + inserted_rbs = self.parsed_rbs[0] + assert isinstance(inserted_rbs, RuleBasedSegment) + assert inserted_rbs.name == 'sample_rule_based_segment' + + @pytest.mark.asyncio async def test_not_called_on_till(self, mocker): """Test that sync is not called when till is less than previous changenumber""" storage = mocker.Mock(spec=InMemorySplitStorageAsync) + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) + class flag_set_filter(): def should_filter(): return False @@ -468,7 +617,8 @@ def intersect(sets): async def change_number_mock(): return 2 storage.get_change_number = change_number_mock - + rbs_storage.get_change_number.side_effect = change_number_mock + async def get_changes(*args, **kwargs): get_changes.called += 1 return None @@ -476,7 +626,7 @@ async def get_changes(*args, **kwargs): api = mocker.Mock() api.fetch_splits = get_changes - split_synchronizer = SplitSynchronizerAsync(api, storage) + split_synchronizer = SplitSynchronizerAsync(api, storage, rbs_storage) await split_synchronizer.synchronize_splits(1) assert get_changes.called == 0 @@ -485,7 +635,7 @@ async def test_synchronize_splits_cdn(self, mocker): """Test split sync with bypassing cdn.""" mocker.patch('splitio.sync.split._ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES', new=3) storage = mocker.Mock(spec=InMemorySplitStorageAsync) - + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) async def change_number_mock(): change_number_mock._calls += 1 if change_number_mock._calls == 1: @@ -495,15 +645,27 @@ async def change_number_mock(): elif change_number_mock._calls <= 7: return 1234 return 12345 # Return proper cn for CDN Bypass + async def rbs_change_number_mock(): + rbs_change_number_mock._calls += 1 + if rbs_change_number_mock._calls == 1: + return -1 + return 12345 # Return 
proper cn for CDN Bypass + change_number_mock._calls = 0 + rbs_change_number_mock._calls = 0 storage.get_change_number = change_number_mock - + rbs_storage.get_change_number = rbs_change_number_mock + self.parsed_split = None async def update(parsed_split, deleted, change_number): if len(parsed_split) > 0: self.parsed_split = parsed_split storage.update = update + async def rbs_update(parsed, deleted, change_number): + pass + rbs_storage.update = rbs_update + api = mocker.Mock() self.change_number_1 = None self.fetch_options_1 = None @@ -511,25 +673,32 @@ async def update(parsed_split, deleted, change_number): self.fetch_options_2 = None self.change_number_3 = None self.fetch_options_3 = None - async def get_changes(change_number, fetch_options): + async def get_changes(change_number, rbs_change_number, fetch_options): get_changes.called += 1 if get_changes.called == 1: self.change_number_1 = change_number self.fetch_options_1 = fetch_options - return { 'splits': self.splits, 'since': -1, 'till': 123 } + return { 'ff': { 'd': self.splits, 's': -1, 't': 123 }, + 'rbs': {"t": 123, "s": -1, "d": []}} elif get_changes.called == 2: self.change_number_2 = change_number self.fetch_options_2 = fetch_options - return { 'splits': [], 'since': 123, 'till': 123 } + return { 'ff': { 'd': [], 's': 123, 't': 123 }, + 'rbs': {"t": 123, "s": 123, "d": []}} elif get_changes.called == 3: - return { 'splits': [], 'since': 123, 'till': 1234 } + return { 'ff': { 'd': [], 's': 123, 't': 1234 }, + 'rbs': {"t": 123, "s": 123, "d": []}} elif get_changes.called >= 4 and get_changes.called <= 6: - return { 'splits': [], 'since': 1234, 'till': 1234 } + return { 'ff': { 'd': [], 's': 1234, 't': 1234 }, + 'rbs': {"t": 123, "s": 123, "d": []}} elif get_changes.called == 7: - return { 'splits': [], 'since': 1234, 'till': 12345 } + return { 'ff': { 'd': [], 's': 1234, 't': 12345 }, + 'rbs': {"t": 123, "s": 123, "d": []}} self.change_number_3 = change_number self.fetch_options_3 = fetch_options - 
return { 'splits': [], 'since': 12345, 'till': 12345 } + return { 'ff': { 'd': [], 's': 12345, 't': 12345 }, + 'rbs': {"t": 123, "s": 123, "d": []}} + get_changes.called = 0 api.fetch_splits = get_changes @@ -544,7 +713,7 @@ def intersect(sets): storage.flag_set_filter.flag_sets = {} storage.flag_set_filter.sorted_flag_sets = [] - split_synchronizer = SplitSynchronizerAsync(api, storage) + split_synchronizer = SplitSynchronizerAsync(api, storage, rbs_storage) split_synchronizer._backoff = Backoff(1, 1) await split_synchronizer.synchronize_splits() @@ -554,7 +723,7 @@ def intersect(sets): split_synchronizer._backoff = Backoff(1, 0.1) await split_synchronizer.synchronize_splits(12345) assert (12345, True, 1234) == (self.change_number_3, self.fetch_options_3.cache_control_headers, self.fetch_options_3.change_number) - assert get_changes.called == 8 # 2 ok + BACKOFF(2 since==till + 2 re-attempts) + CDN(2 since==till) + assert get_changes.called == 10 # 2 ok + BACKOFF(2 since==till + 2 re-attempts) + CDN(2 since==till) inserted_split = self.parsed_split[0] assert isinstance(inserted_split, Split) @@ -564,7 +733,8 @@ def intersect(sets): async def test_sync_flag_sets_with_config_sets(self, mocker): """Test split sync with flag sets.""" storage = InMemorySplitStorageAsync(['set1', 'set2']) - + rbs_storage = InMemoryRuleBasedSegmentStorageAsync() + split = self.splits[0].copy() split['name'] = 'second' splits1 = [self.splits[0].copy(), split] @@ -575,20 +745,25 @@ async def test_sync_flag_sets_with_config_sets(self, mocker): async def get_changes(*args, **kwargs): get_changes.called += 1 if get_changes.called == 1: - return { 'splits': splits1, 'since': 123, 'till': 123 } + return { 'ff': { 'd': splits1, 's': 123, 't': 123 }, + 'rbs': {'t': 123, 's': 123, 'd': []}} elif get_changes.called == 2: splits2[0]['sets'] = ['set3'] - return { 'splits': splits2, 'since': 124, 'till': 124 } + return { 'ff': { 'd': splits2, 's': 124, 't': 124 }, + 'rbs': {'t': 124, 's': 124, 'd': 
[]}} elif get_changes.called == 3: splits3[0]['sets'] = ['set1'] - return { 'splits': splits3, 'since': 12434, 'till': 12434 } + return { 'ff': { 'd': splits3, 's': 12434, 't': 12434 }, + 'rbs': {'t': 12434, 's': 12434, 'd': []}} splits4[0]['sets'] = ['set6'] splits4[0]['name'] = 'new_split' - return { 'splits': splits4, 'since': 12438, 'till': 12438 } + return { 'ff': { 'd': splits4, 's': 12438, 't': 12438 }, + 'rbs': {'t': 12438, 's': 12438, 'd': []}} + get_changes.called = 0 api.fetch_splits = get_changes - split_synchronizer = SplitSynchronizerAsync(api, storage) + split_synchronizer = SplitSynchronizerAsync(api, storage, rbs_storage) split_synchronizer._backoff = Backoff(1, 1) await split_synchronizer.synchronize_splits() assert isinstance(await storage.get('some_name'), Split) @@ -606,7 +781,7 @@ async def get_changes(*args, **kwargs): async def test_sync_flag_sets_without_config_sets(self, mocker): """Test split sync with flag sets.""" storage = InMemorySplitStorageAsync() - + rbs_storage = InMemoryRuleBasedSegmentStorageAsync() split = self.splits[0].copy() split['name'] = 'second' splits1 = [self.splits[0].copy(), split] @@ -617,20 +792,24 @@ async def test_sync_flag_sets_without_config_sets(self, mocker): async def get_changes(*args, **kwargs): get_changes.called += 1 if get_changes.called == 1: - return { 'splits': splits1, 'since': 123, 'till': 123 } + return { 'ff': { 'd': splits1, 's': 123, 't': 123 }, + 'rbs': {"t": 123, "s": 123, "d": []}} elif get_changes.called == 2: splits2[0]['sets'] = ['set3'] - return { 'splits': splits2, 'since': 124, 'till': 124 } + return { 'ff': { 'd': splits2, 's': 124, 't': 124 }, + 'rbs': {"t": 124, "s": 124, "d": []}} elif get_changes.called == 3: splits3[0]['sets'] = ['set1'] - return { 'splits': splits3, 'since': 12434, 'till': 12434 } + return { 'ff': { 'd': splits3, 's': 12434, 't': 12434 }, + 'rbs': {"t": 12434, "s": 12434, "d": []}} splits4[0]['sets'] = ['set6'] splits4[0]['name'] = 'third_split' - return { 
'splits': splits4, 'since': 12438, 'till': 12438 } + return { 'ff': { 'd': splits4, 's': 12438, 't': 12438 }, + 'rbs': {"t": 12438, "s": 12438, "d": []}} get_changes.called = 0 api.fetch_splits.side_effect = get_changes - split_synchronizer = SplitSynchronizerAsync(api, storage) + split_synchronizer = SplitSynchronizerAsync(api, storage, rbs_storage) split_synchronizer._backoff = Backoff(1, 1) await split_synchronizer.synchronize_splits() assert isinstance(await storage.get('new_split'), Split) From 3b6780e8b71e5e2a6b5555f2c75e4d3910f08905 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Mon, 10 Mar 2025 20:17:30 -0300 Subject: [PATCH 08/56] Revert "Updated sync and api classes" This reverts commit 5bda502b3b14917cce7c7268d08a1624ab4f66d7. --- splitio/api/commons.py | 20 +- splitio/api/splits.py | 14 +- splitio/sync/split.py | 103 +++--- tests/api/test_segments_api.py | 14 +- tests/api/test_splits_api.py | 28 +- tests/sync/test_splits_synchronizer.py | 431 ++++++++----------------- 6 files changed, 193 insertions(+), 417 deletions(-) diff --git a/splitio/api/commons.py b/splitio/api/commons.py index 9dda1ee0..2ca75595 100644 --- a/splitio/api/commons.py +++ b/splitio/api/commons.py @@ -57,7 +57,7 @@ def record_telemetry(status_code, elapsed, metric_name, telemetry_runtime_produc class FetchOptions(object): """Fetch Options object.""" - def __init__(self, cache_control_headers=False, change_number=None, rbs_change_number=None, sets=None, spec=SPEC_VERSION): + def __init__(self, cache_control_headers=False, change_number=None, sets=None, spec=SPEC_VERSION): """ Class constructor. 
@@ -72,7 +72,6 @@ def __init__(self, cache_control_headers=False, change_number=None, rbs_change_n """ self._cache_control_headers = cache_control_headers self._change_number = change_number - self._rbs_change_number = rbs_change_number self._sets = sets self._spec = spec @@ -86,11 +85,6 @@ def change_number(self): """Return change number.""" return self._change_number - @property - def rbs_change_number(self): - """Return change number.""" - return self._rbs_change_number - @property def sets(self): """Return sets.""" @@ -109,19 +103,14 @@ def __eq__(self, other): if self._change_number != other._change_number: return False - if self._rbs_change_number != other._rbs_change_number: - return False - if self._sets != other._sets: return False - if self._spec != other._spec: return False - return True -def build_fetch(change_number, fetch_options, metadata, rbs_change_number=None): +def build_fetch(change_number, fetch_options, metadata): """ Build fetch with new flags if that is the case. @@ -134,16 +123,11 @@ def build_fetch(change_number, fetch_options, metadata, rbs_change_number=None): :param metadata: Metadata Headers. :type metadata: dict - :param rbs_change_number: Last known timestamp of a rule based segment modification. 
- :type rbs_change_number: int - :return: Objects for fetch :rtype: dict, dict """ query = {'s': fetch_options.spec} if fetch_options.spec is not None else {} query['since'] = change_number - if rbs_change_number is not None: - query['rbSince'] = rbs_change_number extra_headers = metadata if fetch_options is None: return query, extra_headers diff --git a/splitio/api/splits.py b/splitio/api/splits.py index f013497a..692fde3b 100644 --- a/splitio/api/splits.py +++ b/splitio/api/splits.py @@ -31,16 +31,13 @@ def __init__(self, client, sdk_key, sdk_metadata, telemetry_runtime_producer): self._telemetry_runtime_producer = telemetry_runtime_producer self._client.set_telemetry_data(HTTPExceptionsAndLatencies.SPLIT, self._telemetry_runtime_producer) - def fetch_splits(self, change_number, rbs_change_number, fetch_options): + def fetch_splits(self, change_number, fetch_options): """ Fetch feature flags from backend. :param change_number: Last known timestamp of a split modification. :type change_number: int - :param rbs_change_number: Last known timestamp of a rule based segment modification. - :type rbs_change_number: int - :param fetch_options: Fetch options for getting feature flag definitions. 
:type fetch_options: splitio.api.commons.FetchOptions @@ -48,7 +45,7 @@ def fetch_splits(self, change_number, rbs_change_number, fetch_options): :rtype: dict """ try: - query, extra_headers = build_fetch(change_number, fetch_options, self._metadata, rbs_change_number) + query, extra_headers = build_fetch(change_number, fetch_options, self._metadata) response = self._client.get( 'sdk', 'splitChanges', @@ -89,15 +86,12 @@ def __init__(self, client, sdk_key, sdk_metadata, telemetry_runtime_producer): self._telemetry_runtime_producer = telemetry_runtime_producer self._client.set_telemetry_data(HTTPExceptionsAndLatencies.SPLIT, self._telemetry_runtime_producer) - async def fetch_splits(self, change_number, rbs_change_number, fetch_options): + async def fetch_splits(self, change_number, fetch_options): """ Fetch feature flags from backend. :param change_number: Last known timestamp of a split modification. :type change_number: int - - :param rbs_change_number: Last known timestamp of a rule based segment modification. - :type rbs_change_number: int :param fetch_options: Fetch options for getting feature flag definitions. 
:type fetch_options: splitio.api.commons.FetchOptions @@ -106,7 +100,7 @@ async def fetch_splits(self, change_number, rbs_change_number, fetch_options): :rtype: dict """ try: - query, extra_headers = build_fetch(change_number, fetch_options, self._metadata, rbs_change_number) + query, extra_headers = build_fetch(change_number, fetch_options, self._metadata) response = await self._client.get( 'sdk', 'splitChanges', diff --git a/splitio/sync/split.py b/splitio/sync/split.py index e24a21a0..7bb13117 100644 --- a/splitio/sync/split.py +++ b/splitio/sync/split.py @@ -10,11 +10,10 @@ from splitio.api import APIException, APIUriException from splitio.api.commons import FetchOptions from splitio.client.input_validator import validate_flag_sets -from splitio.models import splits, rule_based_segments +from splitio.models import splits from splitio.util.backoff import Backoff from splitio.util.time import get_current_epoch_time_ms -from splitio.util.storage_helper import update_feature_flag_storage, update_feature_flag_storage_async, \ - update_rule_based_segment_storage, update_rule_based_segment_storage_async +from splitio.util.storage_helper import update_feature_flag_storage, update_feature_flag_storage_async from splitio.sync import util from splitio.optional.loaders import asyncio, aiofiles @@ -33,7 +32,7 @@ class SplitSynchronizerBase(object): """Feature Flag changes synchronizer.""" - def __init__(self, feature_flag_api, feature_flag_storage, rule_based_segment_storage): + def __init__(self, feature_flag_api, feature_flag_storage): """ Class constructor. 
@@ -45,7 +44,6 @@ def __init__(self, feature_flag_api, feature_flag_storage, rule_based_segment_st """ self._api = feature_flag_api self._feature_flag_storage = feature_flag_storage - self._rule_based_segment_storage = rule_based_segment_storage self._backoff = Backoff( _ON_DEMAND_FETCH_BACKOFF_BASE, _ON_DEMAND_FETCH_BACKOFF_MAX_WAIT) @@ -55,11 +53,6 @@ def feature_flag_storage(self): """Return Feature_flag storage object""" return self._feature_flag_storage - @property - def rule_based_segment_storage(self): - """Return rule base segment storage object""" - return self._rule_based_segment_storage - def _get_config_sets(self): """ Get all filter flag sets cnverrted to string, if no filter flagsets exist return None @@ -74,7 +67,7 @@ def _get_config_sets(self): class SplitSynchronizer(SplitSynchronizerBase): """Feature Flag changes synchronizer.""" - def __init__(self, feature_flag_api, feature_flag_storage, rule_based_segment_storage): + def __init__(self, feature_flag_api, feature_flag_storage): """ Class constructor. @@ -84,7 +77,7 @@ def __init__(self, feature_flag_api, feature_flag_storage, rule_based_segment_st :param feature_flag_storage: Feature Flag Storage. 
:type feature_flag_storage: splitio.storage.InMemorySplitStorage """ - SplitSynchronizerBase.__init__(self, feature_flag_api, feature_flag_storage, rule_based_segment_storage) + SplitSynchronizerBase.__init__(self, feature_flag_api, feature_flag_storage) def _fetch_until(self, fetch_options, till=None): """ @@ -104,17 +97,12 @@ def _fetch_until(self, fetch_options, till=None): change_number = self._feature_flag_storage.get_change_number() if change_number is None: change_number = -1 - - rbs_change_number = self._rule_based_segment_storage.get_change_number() - if rbs_change_number is None: - rbs_change_number = -1 - - if till is not None and till < change_number and till < rbs_change_number: + if till is not None and till < change_number: # the passed till is less than change_number, no need to perform updates - return change_number, rbs_change_number, segment_list + return change_number, segment_list try: - feature_flag_changes = self._api.fetch_splits(change_number, rbs_change_number, fetch_options) + feature_flag_changes = self._api.fetch_splits(change_number, fetch_options) except APIException as exc: if exc._status_code is not None and exc._status_code == 414: _LOGGER.error('Exception caught: the amount of flag sets provided are big causing uri length error.') @@ -124,16 +112,15 @@ def _fetch_until(self, fetch_options, till=None): _LOGGER.error('Exception raised while fetching feature flags') _LOGGER.debug('Exception information: ', exc_info=True) raise exc - - fetched_rule_based_segments = [(rule_based_segments.from_raw(rule_based_segment)) for rule_based_segment in feature_flag_changes.get('rbs').get('d', [])] - rbs_segment_list = update_rule_based_segment_storage(self._rule_based_segment_storage, fetched_rule_based_segments, feature_flag_changes.get('rbs')['t']) - - fetched_feature_flags = [(splits.from_raw(feature_flag)) for feature_flag in feature_flag_changes.get('ff').get('d', [])] - segment_list = update_feature_flag_storage(self._feature_flag_storage, 
fetched_feature_flags, feature_flag_changes.get('ff')['t']) - segment_list.update(rbs_segment_list) - - if feature_flag_changes.get('ff')['t'] == feature_flag_changes.get('ff')['s'] and feature_flag_changes.get('rbs')['t'] == feature_flag_changes.get('rbs')['s']: - return feature_flag_changes.get('ff')['t'], feature_flag_changes.get('rbs')['t'], segment_list + fetched_feature_flags = [(splits.from_raw(feature_flag)) for feature_flag in feature_flag_changes.get('splits', [])] + segment_list = update_feature_flag_storage(self._feature_flag_storage, fetched_feature_flags, feature_flag_changes['till']) + if feature_flag_changes['till'] == feature_flag_changes['since']: + return feature_flag_changes['till'], segment_list + + fetched_feature_flags = [(splits.from_raw(feature_flag)) for feature_flag in feature_flag_changes.get('splits', [])] + segment_list = update_feature_flag_storage(self._feature_flag_storage, fetched_feature_flags, feature_flag_changes['till']) + if feature_flag_changes['till'] == feature_flag_changes['since']: + return feature_flag_changes['till'], segment_list def _attempt_feature_flag_sync(self, fetch_options, till=None): """ @@ -153,13 +140,13 @@ def _attempt_feature_flag_sync(self, fetch_options, till=None): remaining_attempts = _ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES while True: remaining_attempts -= 1 - change_number, rbs_change_number, segment_list = self._fetch_until(fetch_options, till) + change_number, segment_list = self._fetch_until(fetch_options, till) final_segment_list.update(segment_list) - if till is None or (till <= change_number and till <= rbs_change_number): - return True, remaining_attempts, change_number, rbs_change_number, final_segment_list + if till is None or till <= change_number: + return True, remaining_attempts, change_number, final_segment_list elif remaining_attempts <= 0: - return False, remaining_attempts, change_number, rbs_change_number, final_segment_list + return False, remaining_attempts, change_number, 
final_segment_list how_long = self._backoff.get() time.sleep(how_long) @@ -185,7 +172,7 @@ def synchronize_splits(self, till=None): """ final_segment_list = set() fetch_options = FetchOptions(True, sets=self._get_config_sets()) # Set Cache-Control to no-cache - successful_sync, remaining_attempts, change_number, rbs_change_number, segment_list = self._attempt_feature_flag_sync(fetch_options, + successful_sync, remaining_attempts, change_number, segment_list = self._attempt_feature_flag_sync(fetch_options, till) final_segment_list.update(segment_list) attempts = _ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES - remaining_attempts @@ -193,8 +180,8 @@ def synchronize_splits(self, till=None): _LOGGER.debug('Refresh completed in %d attempts.', attempts) return final_segment_list - with_cdn_bypass = FetchOptions(True, change_number, rbs_change_number, sets=self._get_config_sets()) # Set flag for bypassing CDN - without_cdn_successful_sync, remaining_attempts, change_number, rbs_change_number, segment_list = self._attempt_feature_flag_sync(with_cdn_bypass, till) + with_cdn_bypass = FetchOptions(True, change_number, sets=self._get_config_sets()) # Set flag for bypassing CDN + without_cdn_successful_sync, remaining_attempts, change_number, segment_list = self._attempt_feature_flag_sync(with_cdn_bypass, till) final_segment_list.update(segment_list) without_cdn_attempts = _ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES - remaining_attempts if without_cdn_successful_sync: @@ -221,7 +208,7 @@ def kill_split(self, feature_flag_name, default_treatment, change_number): class SplitSynchronizerAsync(SplitSynchronizerBase): """Feature Flag changes synchronizer async.""" - def __init__(self, feature_flag_api, feature_flag_storage, rule_based_segment_storage): + def __init__(self, feature_flag_api, feature_flag_storage): """ Class constructor. @@ -231,7 +218,7 @@ def __init__(self, feature_flag_api, feature_flag_storage, rule_based_segment_st :param feature_flag_storage: Feature Flag Storage. 
:type feature_flag_storage: splitio.storage.InMemorySplitStorage """ - SplitSynchronizerBase.__init__(self, feature_flag_api, feature_flag_storage, rule_based_segment_storage) + SplitSynchronizerBase.__init__(self, feature_flag_api, feature_flag_storage) async def _fetch_until(self, fetch_options, till=None): """ @@ -251,17 +238,12 @@ async def _fetch_until(self, fetch_options, till=None): change_number = await self._feature_flag_storage.get_change_number() if change_number is None: change_number = -1 - - rbs_change_number = await self._rule_based_segment_storage.get_change_number() - if rbs_change_number is None: - rbs_change_number = -1 - - if till is not None and till < change_number and till < rbs_change_number: + if till is not None and till < change_number: # the passed till is less than change_number, no need to perform updates - return change_number, rbs_change_number, segment_list + return change_number, segment_list try: - feature_flag_changes = await self._api.fetch_splits(change_number, rbs_change_number, fetch_options) + feature_flag_changes = await self._api.fetch_splits(change_number, fetch_options) except APIException as exc: if exc._status_code is not None and exc._status_code == 414: _LOGGER.error('Exception caught: the amount of flag sets provided are big causing uri length error.') @@ -272,15 +254,10 @@ async def _fetch_until(self, fetch_options, till=None): _LOGGER.debug('Exception information: ', exc_info=True) raise exc - fetched_rule_based_segments = [(rule_based_segments.from_raw(rule_based_segment)) for rule_based_segment in feature_flag_changes.get('rbs').get('d', [])] - rbs_segment_list = await update_rule_based_segment_storage_async(self._rule_based_segment_storage, fetched_rule_based_segments, feature_flag_changes.get('rbs')['t']) - - fetched_feature_flags = [(splits.from_raw(feature_flag)) for feature_flag in feature_flag_changes.get('ff').get('d', [])] - segment_list = await 
update_feature_flag_storage_async(self._feature_flag_storage, fetched_feature_flags, feature_flag_changes.get('ff')['t']) - segment_list.update(rbs_segment_list) - - if feature_flag_changes.get('ff')['t'] == feature_flag_changes.get('ff')['s'] and feature_flag_changes.get('rbs')['t'] == feature_flag_changes.get('rbs')['s']: - return feature_flag_changes.get('ff')['t'], feature_flag_changes.get('rbs')['t'], segment_list + fetched_feature_flags = [(splits.from_raw(feature_flag)) for feature_flag in feature_flag_changes.get('splits', [])] + segment_list = await update_feature_flag_storage_async(self._feature_flag_storage, fetched_feature_flags, feature_flag_changes['till']) + if feature_flag_changes['till'] == feature_flag_changes['since']: + return feature_flag_changes['till'], segment_list async def _attempt_feature_flag_sync(self, fetch_options, till=None): """ @@ -300,13 +277,13 @@ async def _attempt_feature_flag_sync(self, fetch_options, till=None): remaining_attempts = _ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES while True: remaining_attempts -= 1 - change_number, rbs_change_number, segment_list = await self._fetch_until(fetch_options, till) + change_number, segment_list = await self._fetch_until(fetch_options, till) final_segment_list.update(segment_list) - if till is None or (till <= change_number and till <= rbs_change_number): - return True, remaining_attempts, change_number, rbs_change_number, final_segment_list + if till is None or till <= change_number: + return True, remaining_attempts, change_number, final_segment_list elif remaining_attempts <= 0: - return False, remaining_attempts, change_number, rbs_change_number, final_segment_list + return False, remaining_attempts, change_number, final_segment_list how_long = self._backoff.get() await asyncio.sleep(how_long) @@ -320,7 +297,7 @@ async def synchronize_splits(self, till=None): """ final_segment_list = set() fetch_options = FetchOptions(True, sets=self._get_config_sets()) # Set Cache-Control to no-cache - 
successful_sync, remaining_attempts, change_number, rbs_change_number, segment_list = await self._attempt_feature_flag_sync(fetch_options, + successful_sync, remaining_attempts, change_number, segment_list = await self._attempt_feature_flag_sync(fetch_options, till) final_segment_list.update(segment_list) attempts = _ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES - remaining_attempts @@ -328,8 +305,8 @@ async def synchronize_splits(self, till=None): _LOGGER.debug('Refresh completed in %d attempts.', attempts) return final_segment_list - with_cdn_bypass = FetchOptions(True, change_number, rbs_change_number, sets=self._get_config_sets()) # Set flag for bypassing CDN - without_cdn_successful_sync, remaining_attempts, change_number, rbs_change_number, segment_list = await self._attempt_feature_flag_sync(with_cdn_bypass, till) + with_cdn_bypass = FetchOptions(True, change_number, sets=self._get_config_sets()) # Set flag for bypassing CDN + without_cdn_successful_sync, remaining_attempts, change_number, segment_list = await self._attempt_feature_flag_sync(with_cdn_bypass, till) final_segment_list.update(segment_list) without_cdn_attempts = _ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES - remaining_attempts if without_cdn_successful_sync: diff --git a/tests/api/test_segments_api.py b/tests/api/test_segments_api.py index 8681be59..73e3efe7 100644 --- a/tests/api/test_segments_api.py +++ b/tests/api/test_segments_api.py @@ -16,7 +16,7 @@ def test_fetch_segment_changes(self, mocker): httpclient.get.return_value = client.HttpResponse(200, '{"prop1": "value1"}', {}) segment_api = segments.SegmentsAPI(httpclient, 'some_api_key', SdkMetadata('1.0', 'some', '1.2.3.4'), mocker.Mock()) - response = segment_api.fetch_segment('some_segment', 123, FetchOptions(None, None, None, None, None)) + response = segment_api.fetch_segment('some_segment', 123, FetchOptions(None, None, None, None)) assert response['prop1'] == 'value1' assert httpclient.get.mock_calls == [mocker.call('sdk', 
'segmentChanges/some_segment', 'some_api_key', extra_headers={ @@ -27,7 +27,7 @@ def test_fetch_segment_changes(self, mocker): query={'since': 123})] httpclient.reset_mock() - response = segment_api.fetch_segment('some_segment', 123, FetchOptions(True, None, None, None, None)) + response = segment_api.fetch_segment('some_segment', 123, FetchOptions(True, None, None, None)) assert response['prop1'] == 'value1' assert httpclient.get.mock_calls == [mocker.call('sdk', 'segmentChanges/some_segment', 'some_api_key', extra_headers={ @@ -39,7 +39,7 @@ def test_fetch_segment_changes(self, mocker): query={'since': 123})] httpclient.reset_mock() - response = segment_api.fetch_segment('some_segment', 123, FetchOptions(True, 123, None, None, None)) + response = segment_api.fetch_segment('some_segment', 123, FetchOptions(True, 123, None, None)) assert response['prop1'] == 'value1' assert httpclient.get.mock_calls == [mocker.call('sdk', 'segmentChanges/some_segment', 'some_api_key', extra_headers={ @@ -83,7 +83,7 @@ async def get(verb, url, key, query, extra_headers): return client.HttpResponse(200, '{"prop1": "value1"}', {}) httpclient.get = get - response = await segment_api.fetch_segment('some_segment', 123, FetchOptions(None, None, None, None, None)) + response = await segment_api.fetch_segment('some_segment', 123, FetchOptions(None, None, None, None)) assert response['prop1'] == 'value1' assert self.verb == 'sdk' assert self.url == 'segmentChanges/some_segment' @@ -96,7 +96,7 @@ async def get(verb, url, key, query, extra_headers): assert self.query == {'since': 123} httpclient.reset_mock() - response = await segment_api.fetch_segment('some_segment', 123, FetchOptions(True, None, None, None, None)) + response = await segment_api.fetch_segment('some_segment', 123, FetchOptions(True, None, None, None)) assert response['prop1'] == 'value1' assert self.verb == 'sdk' assert self.url == 'segmentChanges/some_segment' @@ -110,7 +110,7 @@ async def get(verb, url, key, query, 
extra_headers): assert self.query == {'since': 123} httpclient.reset_mock() - response = await segment_api.fetch_segment('some_segment', 123, FetchOptions(True, 123, None, None, None)) + response = await segment_api.fetch_segment('some_segment', 123, FetchOptions(True, 123, None, None)) assert response['prop1'] == 'value1' assert self.verb == 'sdk' assert self.url == 'segmentChanges/some_segment' @@ -128,6 +128,6 @@ def raise_exception(*args, **kwargs): raise client.HttpClientException('some_message') httpclient.get = raise_exception with pytest.raises(APIException) as exc_info: - response = await segment_api.fetch_segment('some_segment', 123, FetchOptions(None, None, None, None, None)) + response = await segment_api.fetch_segment('some_segment', 123, FetchOptions(None, None, None, None)) assert exc_info.type == APIException assert exc_info.value.message == 'some_message' diff --git a/tests/api/test_splits_api.py b/tests/api/test_splits_api.py index 1826ec23..d1d276b7 100644 --- a/tests/api/test_splits_api.py +++ b/tests/api/test_splits_api.py @@ -16,7 +16,7 @@ def test_fetch_split_changes(self, mocker): httpclient.get.return_value = client.HttpResponse(200, '{"prop1": "value1"}', {}) split_api = splits.SplitsAPI(httpclient, 'some_api_key', SdkMetadata('1.0', 'some', '1.2.3.4'), mocker.Mock()) - response = split_api.fetch_splits(123, -1, FetchOptions(False, None, None, 'set1,set2')) + response = split_api.fetch_splits(123, FetchOptions(False, None, 'set1,set2')) assert response['prop1'] == 'value1' assert httpclient.get.mock_calls == [mocker.call('sdk', 'splitChanges', 'some_api_key', extra_headers={ @@ -24,10 +24,10 @@ def test_fetch_split_changes(self, mocker): 'SplitSDKMachineIP': '1.2.3.4', 'SplitSDKMachineName': 'some' }, - query={'s': '1.1', 'since': 123, 'rbSince': -1, 'sets': 'set1,set2'})] + query={'s': '1.1', 'since': 123, 'sets': 'set1,set2'})] httpclient.reset_mock() - response = split_api.fetch_splits(123, 1, FetchOptions(True, 123, None,'set3')) + 
response = split_api.fetch_splits(123, FetchOptions(True, 123, 'set3')) assert response['prop1'] == 'value1' assert httpclient.get.mock_calls == [mocker.call('sdk', 'splitChanges', 'some_api_key', extra_headers={ @@ -36,10 +36,10 @@ def test_fetch_split_changes(self, mocker): 'SplitSDKMachineName': 'some', 'Cache-Control': 'no-cache' }, - query={'s': '1.1', 'since': 123, 'rbSince': 1, 'till': 123, 'sets': 'set3'})] + query={'s': '1.1', 'since': 123, 'till': 123, 'sets': 'set3'})] httpclient.reset_mock() - response = split_api.fetch_splits(123, 122, FetchOptions(True, 123, None, 'set3')) + response = split_api.fetch_splits(123, FetchOptions(True, 123, 'set3')) assert response['prop1'] == 'value1' assert httpclient.get.mock_calls == [mocker.call('sdk', 'splitChanges', 'some_api_key', extra_headers={ @@ -48,14 +48,14 @@ def test_fetch_split_changes(self, mocker): 'SplitSDKMachineName': 'some', 'Cache-Control': 'no-cache' }, - query={'s': '1.1', 'since': 123, 'rbSince': 122, 'till': 123, 'sets': 'set3'})] + query={'s': '1.1', 'since': 123, 'till': 123, 'sets': 'set3'})] httpclient.reset_mock() def raise_exception(*args, **kwargs): raise client.HttpClientException('some_message') httpclient.get.side_effect = raise_exception with pytest.raises(APIException) as exc_info: - response = split_api.fetch_splits(123, 12, FetchOptions()) + response = split_api.fetch_splits(123, FetchOptions()) assert exc_info.type == APIException assert exc_info.value.message == 'some_message' @@ -82,7 +82,7 @@ async def get(verb, url, key, query, extra_headers): return client.HttpResponse(200, '{"prop1": "value1"}', {}) httpclient.get = get - response = await split_api.fetch_splits(123, -1, FetchOptions(False, None, None, 'set1,set2')) + response = await split_api.fetch_splits(123, FetchOptions(False, None, 'set1,set2')) assert response['prop1'] == 'value1' assert self.verb == 'sdk' assert self.url == 'splitChanges' @@ -92,10 +92,10 @@ async def get(verb, url, key, query, extra_headers): 
'SplitSDKMachineIP': '1.2.3.4', 'SplitSDKMachineName': 'some' } - assert self.query == {'s': '1.1', 'since': 123, 'rbSince': -1, 'sets': 'set1,set2'} + assert self.query == {'s': '1.1', 'since': 123, 'sets': 'set1,set2'} httpclient.reset_mock() - response = await split_api.fetch_splits(123, 1, FetchOptions(True, 123, None, 'set3')) + response = await split_api.fetch_splits(123, FetchOptions(True, 123, 'set3')) assert response['prop1'] == 'value1' assert self.verb == 'sdk' assert self.url == 'splitChanges' @@ -106,10 +106,10 @@ async def get(verb, url, key, query, extra_headers): 'SplitSDKMachineName': 'some', 'Cache-Control': 'no-cache' } - assert self.query == {'s': '1.1', 'since': 123, 'rbSince': 1, 'till': 123, 'sets': 'set3'} + assert self.query == {'s': '1.1', 'since': 123, 'till': 123, 'sets': 'set3'} httpclient.reset_mock() - response = await split_api.fetch_splits(123, 122, FetchOptions(True, 123, None)) + response = await split_api.fetch_splits(123, FetchOptions(True, 123)) assert response['prop1'] == 'value1' assert self.verb == 'sdk' assert self.url == 'splitChanges' @@ -120,13 +120,13 @@ async def get(verb, url, key, query, extra_headers): 'SplitSDKMachineName': 'some', 'Cache-Control': 'no-cache' } - assert self.query == {'s': '1.1', 'since': 123, 'rbSince': 122, 'till': 123} + assert self.query == {'s': '1.1', 'since': 123, 'till': 123} httpclient.reset_mock() def raise_exception(*args, **kwargs): raise client.HttpClientException('some_message') httpclient.get = raise_exception with pytest.raises(APIException) as exc_info: - response = await split_api.fetch_splits(123, 12, FetchOptions()) + response = await split_api.fetch_splits(123, FetchOptions()) assert exc_info.type == APIException assert exc_info.value.message == 'some_message' diff --git a/tests/sync/test_splits_synchronizer.py b/tests/sync/test_splits_synchronizer.py index 470c2241..b5aafd51 100644 --- a/tests/sync/test_splits_synchronizer.py +++ b/tests/sync/test_splits_synchronizer.py @@ 
-9,10 +9,9 @@ from splitio.api import APIException from splitio.api.commons import FetchOptions from splitio.storage import SplitStorage -from splitio.storage.inmemmory import InMemorySplitStorage, InMemorySplitStorageAsync, InMemoryRuleBasedSegmentStorage, InMemoryRuleBasedSegmentStorageAsync +from splitio.storage.inmemmory import InMemorySplitStorage, InMemorySplitStorageAsync from splitio.storage import FlagSetsFilter from splitio.models.splits import Split -from splitio.models.rule_based_segments import RuleBasedSegment from splitio.sync.split import SplitSynchronizer, SplitSynchronizerAsync, LocalSplitSynchronizer, LocalSplitSynchronizerAsync, LocalhostMode from splitio.optional.loaders import aiofiles, asyncio from tests.integration import splits_json @@ -53,112 +52,42 @@ 'sets': ['set1', 'set2'] }] -json_body = { - "ff": { - "t":1675095324253, - "s":-1, - 'd': [{ - 'changeNumber': 123, - 'trafficTypeName': 'user', - 'name': 'some_name', - 'trafficAllocation': 100, - 'trafficAllocationSeed': 123456, - 'seed': 321654, - 'status': 'ACTIVE', - 'killed': False, - 'defaultTreatment': 'off', - 'algo': 2, - 'conditions': [ - { - 'partitions': [ - {'treatment': 'on', 'size': 50}, - {'treatment': 'off', 'size': 50} - ], - 'contitionType': 'WHITELIST', - 'label': 'some_label', - 'matcherGroup': { - 'matchers': [ - { - 'matcherType': 'WHITELIST', - 'whitelistMatcherData': { - 'whitelist': ['k1', 'k2', 'k3'] - }, - 'negate': False, - } - ], - 'combiner': 'AND' - } - }, - { - "conditionType": "ROLLOUT", - "matcherGroup": { - "combiner": "AND", - "matchers": [ - { - "keySelector": { - "trafficType": "user" - }, - "matcherType": "IN_RULE_BASED_SEGMENT", - "negate": False, - "userDefinedSegmentMatcherData": { - "segmentName": "sample_rule_based_segment" - } - } - ] - }, - "partitions": [ - { - "treatment": "on", - "size": 100 - }, - { - "treatment": "off", - "size": 0 - } +json_body = {'splits': [{ + 'changeNumber': 123, + 'trafficTypeName': 'user', + 'name': 'some_name', + 
'trafficAllocation': 100, + 'trafficAllocationSeed': 123456, + 'seed': 321654, + 'status': 'ACTIVE', + 'killed': False, + 'defaultTreatment': 'off', + 'algo': 2, + 'conditions': [ + { + 'partitions': [ + {'treatment': 'on', 'size': 50}, + {'treatment': 'off', 'size': 50} ], - "label": "in rule based segment sample_rule_based_segment" - }, - ], - 'sets': ['set1', 'set2']}] - }, - "rbs": { - "t": 1675095324253, - "s": -1, - "d": [ - { - "changeNumber": 5, - "name": "sample_rule_based_segment", - "status": "ACTIVE", - "trafficTypeName": "user", - "excluded":{ - "keys":["mauro@split.io","gaston@split.io"], - "segments":[] - }, - "conditions": [ - { - "matcherGroup": { - "combiner": "AND", - "matchers": [ - { - "keySelector": { - "trafficType": "user", - "attribute": "email" - }, - "matcherType": "ENDS_WITH", - "negate": False, - "whitelistMatcherData": { - "whitelist": [ - "@split.io" - ] - } - } - ] + 'contitionType': 'WHITELIST', + 'label': 'some_label', + 'matcherGroup': { + 'matchers': [ + { + 'matcherType': 'WHITELIST', + 'whitelistMatcherData': { + 'whitelist': ['k1', 'k2', 'k3'] + }, + 'negate': False, + } + ], + 'combiner': 'AND' } - } - ] - } - ] - } + } + ], + 'sets': ['set1', 'set2']}], + "till":1675095324253, + "since":-1, } class SplitsSynchronizerTests(object): @@ -169,16 +98,13 @@ class SplitsSynchronizerTests(object): def test_synchronize_splits_error(self, mocker): """Test that if fetching splits fails at some_point, the task will continue running.""" storage = mocker.Mock(spec=InMemorySplitStorage) - rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) api = mocker.Mock() - def run(x, y, c): + def run(x, c): raise APIException("something broke") run._calls = 0 api.fetch_splits.side_effect = run storage.get_change_number.return_value = -1 - rbs_storage.get_change_number.return_value = -1 - class flag_set_filter(): def should_filter(): return False @@ -189,7 +115,7 @@ def intersect(sets): storage.flag_set_filter.flag_sets = {} 
storage.flag_set_filter.sorted_flag_sets = [] - split_synchronizer = SplitSynchronizer(api, storage, rbs_storage) + split_synchronizer = SplitSynchronizer(api, storage) with pytest.raises(APIException): split_synchronizer.synchronize_splits(1) @@ -197,32 +123,21 @@ def intersect(sets): def test_synchronize_splits(self, mocker): """Test split sync.""" storage = mocker.Mock(spec=InMemorySplitStorage) - rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) def change_number_mock(): change_number_mock._calls += 1 if change_number_mock._calls == 1: return -1 return 123 - - def rbs_change_number_mock(): - rbs_change_number_mock._calls += 1 - if rbs_change_number_mock._calls == 1: - return -1 - return 123 - change_number_mock._calls = 0 - rbs_change_number_mock._calls = 0 storage.get_change_number.side_effect = change_number_mock - rbs_storage.get_change_number.side_effect = rbs_change_number_mock - + class flag_set_filter(): def should_filter(): return False def intersect(sets): return True - storage.flag_set_filter = flag_set_filter storage.flag_set_filter.flag_sets = {} storage.flag_set_filter.sorted_flag_sets = [] @@ -232,46 +147,35 @@ def get_changes(*args, **kwargs): get_changes.called += 1 if get_changes.called == 1: - return json_body + return { + 'splits': self.splits, + 'since': -1, + 'till': 123 + } else: return { - "ff": { - "t":123, - "s":123, - 'd': [] - }, - "rbs": { - "t": 5, - "s": 5, - "d": [] - } + 'splits': [], + 'since': 123, + 'till': 123 } - get_changes.called = 0 api.fetch_splits.side_effect = get_changes - split_synchronizer = SplitSynchronizer(api, storage, rbs_storage) + split_synchronizer = SplitSynchronizer(api, storage) split_synchronizer.synchronize_splits() - + assert api.fetch_splits.mock_calls[0][1][0] == -1 - assert api.fetch_splits.mock_calls[0][1][2].cache_control_headers == True + assert api.fetch_splits.mock_calls[0][1][1].cache_control_headers == True assert api.fetch_splits.mock_calls[1][1][0] == 123 - assert 
api.fetch_splits.mock_calls[1][1][1] == 123 - assert api.fetch_splits.mock_calls[1][1][2].cache_control_headers == True + assert api.fetch_splits.mock_calls[1][1][1].cache_control_headers == True inserted_split = storage.update.mock_calls[0][1][0][0] assert isinstance(inserted_split, Split) assert inserted_split.name == 'some_name' - inserted_rbs = rbs_storage.update.mock_calls[0][1][0][0] - assert isinstance(inserted_rbs, RuleBasedSegment) - assert inserted_rbs.name == 'sample_rule_based_segment' - def test_not_called_on_till(self, mocker): """Test that sync is not called when till is less than previous changenumber""" storage = mocker.Mock(spec=InMemorySplitStorage) - rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) - class flag_set_filter(): def should_filter(): return False @@ -285,7 +189,6 @@ def intersect(sets): def change_number_mock(): return 2 storage.get_change_number.side_effect = change_number_mock - rbs_storage.get_change_number.side_effect = change_number_mock def get_changes(*args, **kwargs): get_changes.called += 1 @@ -296,7 +199,7 @@ def get_changes(*args, **kwargs): api = mocker.Mock() api.fetch_splits.side_effect = get_changes - split_synchronizer = SplitSynchronizer(api, storage, rbs_storage) + split_synchronizer = SplitSynchronizer(api, storage) split_synchronizer.synchronize_splits(1) assert get_changes.called == 0 @@ -306,7 +209,6 @@ def test_synchronize_splits_cdn(self, mocker): mocker.patch('splitio.sync.split._ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES', new=3) storage = mocker.Mock(spec=InMemorySplitStorage) - rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) def change_number_mock(): change_number_mock._calls += 1 @@ -317,39 +219,24 @@ def change_number_mock(): elif change_number_mock._calls <= 7: return 1234 return 12345 # Return proper cn for CDN Bypass - - def rbs_change_number_mock(): - rbs_change_number_mock._calls += 1 - if rbs_change_number_mock._calls == 1: - return -1 - return 12345 # Return proper cn for 
CDN Bypass - change_number_mock._calls = 0 - rbs_change_number_mock._calls = 0 storage.get_change_number.side_effect = change_number_mock - rbs_storage.get_change_number.side_effect = rbs_change_number_mock api = mocker.Mock() def get_changes(*args, **kwargs): get_changes.called += 1 if get_changes.called == 1: - return { 'ff': { 'd': self.splits, 's': -1, 't': 123 }, - 'rbs': {"t": 123, "s": -1, "d": []}} + return { 'splits': self.splits, 'since': -1, 'till': 123 } elif get_changes.called == 2: - return { 'ff': { 'd': [], 's': 123, 't': 123 }, - 'rbs': {"t": 123, "s": 123, "d": []}} + return { 'splits': [], 'since': 123, 'till': 123 } elif get_changes.called == 3: - return { 'ff': { 'd': [], 's': 123, 't': 1234 }, - 'rbs': {"t": 123, "s": 123, "d": []}} + return { 'splits': [], 'since': 123, 'till': 1234 } elif get_changes.called >= 4 and get_changes.called <= 6: - return { 'ff': { 'd': [], 's': 1234, 't': 1234 }, - 'rbs': {"t": 123, "s": 123, "d": []}} + return { 'splits': [], 'since': 1234, 'till': 1234 } elif get_changes.called == 7: - return { 'ff': { 'd': [], 's': 1234, 't': 12345 }, - 'rbs': {"t": 123, "s": 123, "d": []}} - return { 'ff': { 'd': [], 's': 12345, 't': 12345 }, - 'rbs': {"t": 123, "s": 123, "d": []}} + return { 'splits': [], 'since': 1234, 'till': 12345 } + return { 'splits': [], 'since': 12345, 'till': 12345 } get_changes.called = 0 api.fetch_splits.side_effect = get_changes @@ -364,20 +251,20 @@ def intersect(sets): storage.flag_set_filter.flag_sets = {} storage.flag_set_filter.sorted_flag_sets = [] - split_synchronizer = SplitSynchronizer(api, storage, rbs_storage) + split_synchronizer = SplitSynchronizer(api, storage) split_synchronizer._backoff = Backoff(1, 1) split_synchronizer.synchronize_splits() assert api.fetch_splits.mock_calls[0][1][0] == -1 - assert api.fetch_splits.mock_calls[0][1][2].cache_control_headers == True + assert api.fetch_splits.mock_calls[0][1][1].cache_control_headers == True assert 
api.fetch_splits.mock_calls[1][1][0] == 123 - assert api.fetch_splits.mock_calls[1][1][2].cache_control_headers == True + assert api.fetch_splits.mock_calls[1][1][1].cache_control_headers == True split_synchronizer._backoff = Backoff(1, 0.1) split_synchronizer.synchronize_splits(12345) assert api.fetch_splits.mock_calls[3][1][0] == 1234 - assert api.fetch_splits.mock_calls[3][1][2].cache_control_headers == True - assert len(api.fetch_splits.mock_calls) == 10 # 2 ok + BACKOFF(2 since==till + 2 re-attempts) + CDN(2 since==till) + assert api.fetch_splits.mock_calls[3][1][1].cache_control_headers == True + assert len(api.fetch_splits.mock_calls) == 8 # 2 ok + BACKOFF(2 since==till + 2 re-attempts) + CDN(2 since==till) inserted_split = storage.update.mock_calls[0][1][0][0] assert isinstance(inserted_split, Split) @@ -386,36 +273,31 @@ def intersect(sets): def test_sync_flag_sets_with_config_sets(self, mocker): """Test split sync with flag sets.""" storage = InMemorySplitStorage(['set1', 'set2']) - rbs_storage = InMemoryRuleBasedSegmentStorage() - - split = copy.deepcopy(self.splits[0]) + + split = self.splits[0].copy() split['name'] = 'second' splits1 = [self.splits[0].copy(), split] - splits2 = copy.deepcopy(self.splits) - splits3 = copy.deepcopy(self.splits) - splits4 = copy.deepcopy(self.splits) + splits2 = self.splits.copy() + splits3 = self.splits.copy() + splits4 = self.splits.copy() api = mocker.Mock() def get_changes(*args, **kwargs): get_changes.called += 1 if get_changes.called == 1: - return { 'ff': { 'd': splits1, 's': 123, 't': 123 }, - 'rbs': {'t': 123, 's': 123, 'd': []}} + return { 'splits': splits1, 'since': 123, 'till': 123 } elif get_changes.called == 2: splits2[0]['sets'] = ['set3'] - return { 'ff': { 'd': splits2, 's': 124, 't': 124 }, - 'rbs': {'t': 124, 's': 124, 'd': []}} + return { 'splits': splits2, 'since': 124, 'till': 124 } elif get_changes.called == 3: splits3[0]['sets'] = ['set1'] - return { 'ff': { 'd': splits3, 's': 12434, 't': 12434 }, 
- 'rbs': {'t': 12434, 's': 12434, 'd': []}} + return { 'splits': splits3, 'since': 12434, 'till': 12434 } splits4[0]['sets'] = ['set6'] splits4[0]['name'] = 'new_split' - return { 'ff': { 'd': splits4, 's': 12438, 't': 12438 }, - 'rbs': {'t': 12438, 's': 12438, 'd': []}} + return { 'splits': splits4, 'since': 12438, 'till': 12438 } get_changes.called = 0 api.fetch_splits.side_effect = get_changes - split_synchronizer = SplitSynchronizer(api, storage, rbs_storage) + split_synchronizer = SplitSynchronizer(api, storage) split_synchronizer._backoff = Backoff(1, 1) split_synchronizer.synchronize_splits() assert isinstance(storage.get('some_name'), Split) @@ -432,44 +314,40 @@ def get_changes(*args, **kwargs): def test_sync_flag_sets_without_config_sets(self, mocker): """Test split sync with flag sets.""" storage = InMemorySplitStorage() - rbs_storage = InMemoryRuleBasedSegmentStorage() - split = copy.deepcopy(self.splits[0]) + + split = self.splits[0].copy() split['name'] = 'second' splits1 = [self.splits[0].copy(), split] - splits2 = copy.deepcopy(self.splits) - splits3 = copy.deepcopy(self.splits) - splits4 = copy.deepcopy(self.splits) + splits2 = self.splits.copy() + splits3 = self.splits.copy() + splits4 = self.splits.copy() api = mocker.Mock() def get_changes(*args, **kwargs): get_changes.called += 1 if get_changes.called == 1: - return { 'ff': { 'd': splits1, 's': 123, 't': 123 }, - 'rbs': {"t": 123, "s": 123, "d": []}} + return { 'splits': splits1, 'since': 123, 'till': 123 } elif get_changes.called == 2: splits2[0]['sets'] = ['set3'] - return { 'ff': { 'd': splits2, 's': 124, 't': 124 }, - 'rbs': {"t": 124, "s": 124, "d": []}} + return { 'splits': splits2, 'since': 124, 'till': 124 } elif get_changes.called == 3: splits3[0]['sets'] = ['set1'] - return { 'ff': { 'd': splits3, 's': 12434, 't': 12434 }, - 'rbs': {"t": 12434, "s": 12434, "d": []}} + return { 'splits': splits3, 'since': 12434, 'till': 12434 } splits4[0]['sets'] = ['set6'] splits4[0]['name'] = 
'third_split' - return { 'ff': { 'd': splits4, 's': 12438, 't': 12438 }, - 'rbs': {"t": 12438, "s": 12438, "d": []}} + return { 'splits': splits4, 'since': 12438, 'till': 12438 } get_changes.called = 0 api.fetch_splits.side_effect = get_changes - split_synchronizer = SplitSynchronizer(api, storage, rbs_storage) + split_synchronizer = SplitSynchronizer(api, storage) split_synchronizer._backoff = Backoff(1, 1) split_synchronizer.synchronize_splits() - assert isinstance(storage.get('some_name'), Split) + assert isinstance(storage.get('new_split'), Split) split_synchronizer.synchronize_splits(124) - assert isinstance(storage.get('some_name'), Split) + assert isinstance(storage.get('new_split'), Split) split_synchronizer.synchronize_splits(12434) - assert isinstance(storage.get('some_name'), Split) + assert isinstance(storage.get('new_split'), Split) split_synchronizer.synchronize_splits(12438) assert isinstance(storage.get('third_split'), Split) @@ -483,19 +361,17 @@ class SplitsSynchronizerAsyncTests(object): async def test_synchronize_splits_error(self, mocker): """Test that if fetching splits fails at some_point, the task will continue running.""" storage = mocker.Mock(spec=InMemorySplitStorageAsync) - rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) api = mocker.Mock() - async def run(x, y, c): + async def run(x, c): raise APIException("something broke") run._calls = 0 api.fetch_splits = run async def get_change_number(*args): return -1 - storage.get_change_number = get_change_number - rbs_storage.get_change_number = get_change_number - + storage.get_change_number = get_change_number + class flag_set_filter(): def should_filter(): return False @@ -506,7 +382,7 @@ def intersect(sets): storage.flag_set_filter.flag_sets = {} storage.flag_set_filter.sorted_flag_sets = [] - split_synchronizer = SplitSynchronizerAsync(api, storage, rbs_storage) + split_synchronizer = SplitSynchronizerAsync(api, storage) with pytest.raises(APIException): await 
split_synchronizer.synchronize_splits(1) @@ -515,24 +391,15 @@ def intersect(sets): async def test_synchronize_splits(self, mocker): """Test split sync.""" storage = mocker.Mock(spec=InMemorySplitStorageAsync) - rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) - + async def change_number_mock(): change_number_mock._calls += 1 if change_number_mock._calls == 1: return -1 return 123 - async def rbs_change_number_mock(): - rbs_change_number_mock._calls += 1 - if rbs_change_number_mock._calls == 1: - return -1 - return 123 - change_number_mock._calls = 0 - rbs_change_number_mock._calls = 0 storage.get_change_number = change_number_mock - rbs_storage.get_change_number.side_effect = rbs_change_number_mock - + class flag_set_filter(): def should_filter(): return False @@ -549,42 +416,33 @@ async def update(parsed_split, deleted, chanhe_number): self.parsed_split = parsed_split storage.update = update - self.parsed_rbs = None - async def update(parsed_rbs, deleted, chanhe_number): - if len(parsed_rbs) > 0: - self.parsed_rbs = parsed_rbs - rbs_storage.update = update - api = mocker.Mock() self.change_number_1 = None self.fetch_options_1 = None self.change_number_2 = None self.fetch_options_2 = None - async def get_changes(change_number, rbs_change_number, fetch_options): + async def get_changes(change_number, fetch_options): get_changes.called += 1 if get_changes.called == 1: self.change_number_1 = change_number self.fetch_options_1 = fetch_options - return json_body + return { + 'splits': self.splits, + 'since': -1, + 'till': 123 + } else: self.change_number_2 = change_number self.fetch_options_2 = fetch_options return { - "ff": { - "t":123, - "s":123, - 'd': [] - }, - "rbs": { - "t": 123, - "s": 123, - "d": [] - } + 'splits': [], + 'since': 123, + 'till': 123 } get_changes.called = 0 api.fetch_splits = get_changes - split_synchronizer = SplitSynchronizerAsync(api, storage, rbs_storage) + split_synchronizer = SplitSynchronizerAsync(api, storage) await 
split_synchronizer.synchronize_splits() assert (-1, FetchOptions(True)._cache_control_headers) == (self.change_number_1, self.fetch_options_1._cache_control_headers) @@ -593,17 +451,10 @@ async def get_changes(change_number, rbs_change_number, fetch_options): assert isinstance(inserted_split, Split) assert inserted_split.name == 'some_name' - inserted_rbs = self.parsed_rbs[0] - assert isinstance(inserted_rbs, RuleBasedSegment) - assert inserted_rbs.name == 'sample_rule_based_segment' - - @pytest.mark.asyncio async def test_not_called_on_till(self, mocker): """Test that sync is not called when till is less than previous changenumber""" storage = mocker.Mock(spec=InMemorySplitStorageAsync) - rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) - class flag_set_filter(): def should_filter(): return False @@ -617,8 +468,7 @@ def intersect(sets): async def change_number_mock(): return 2 storage.get_change_number = change_number_mock - rbs_storage.get_change_number.side_effect = change_number_mock - + async def get_changes(*args, **kwargs): get_changes.called += 1 return None @@ -626,7 +476,7 @@ async def get_changes(*args, **kwargs): api = mocker.Mock() api.fetch_splits = get_changes - split_synchronizer = SplitSynchronizerAsync(api, storage, rbs_storage) + split_synchronizer = SplitSynchronizerAsync(api, storage) await split_synchronizer.synchronize_splits(1) assert get_changes.called == 0 @@ -635,7 +485,7 @@ async def test_synchronize_splits_cdn(self, mocker): """Test split sync with bypassing cdn.""" mocker.patch('splitio.sync.split._ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES', new=3) storage = mocker.Mock(spec=InMemorySplitStorageAsync) - rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) + async def change_number_mock(): change_number_mock._calls += 1 if change_number_mock._calls == 1: @@ -645,27 +495,15 @@ async def change_number_mock(): elif change_number_mock._calls <= 7: return 1234 return 12345 # Return proper cn for CDN Bypass - async def 
rbs_change_number_mock(): - rbs_change_number_mock._calls += 1 - if rbs_change_number_mock._calls == 1: - return -1 - return 12345 # Return proper cn for CDN Bypass - change_number_mock._calls = 0 - rbs_change_number_mock._calls = 0 storage.get_change_number = change_number_mock - rbs_storage.get_change_number = rbs_change_number_mock - + self.parsed_split = None async def update(parsed_split, deleted, change_number): if len(parsed_split) > 0: self.parsed_split = parsed_split storage.update = update - async def rbs_update(parsed, deleted, change_number): - pass - rbs_storage.update = rbs_update - api = mocker.Mock() self.change_number_1 = None self.fetch_options_1 = None @@ -673,32 +511,25 @@ async def rbs_update(parsed, deleted, change_number): self.fetch_options_2 = None self.change_number_3 = None self.fetch_options_3 = None - async def get_changes(change_number, rbs_change_number, fetch_options): + async def get_changes(change_number, fetch_options): get_changes.called += 1 if get_changes.called == 1: self.change_number_1 = change_number self.fetch_options_1 = fetch_options - return { 'ff': { 'd': self.splits, 's': -1, 't': 123 }, - 'rbs': {"t": 123, "s": -1, "d": []}} + return { 'splits': self.splits, 'since': -1, 'till': 123 } elif get_changes.called == 2: self.change_number_2 = change_number self.fetch_options_2 = fetch_options - return { 'ff': { 'd': [], 's': 123, 't': 123 }, - 'rbs': {"t": 123, "s": 123, "d": []}} + return { 'splits': [], 'since': 123, 'till': 123 } elif get_changes.called == 3: - return { 'ff': { 'd': [], 's': 123, 't': 1234 }, - 'rbs': {"t": 123, "s": 123, "d": []}} + return { 'splits': [], 'since': 123, 'till': 1234 } elif get_changes.called >= 4 and get_changes.called <= 6: - return { 'ff': { 'd': [], 's': 1234, 't': 1234 }, - 'rbs': {"t": 123, "s": 123, "d": []}} + return { 'splits': [], 'since': 1234, 'till': 1234 } elif get_changes.called == 7: - return { 'ff': { 'd': [], 's': 1234, 't': 12345 }, - 'rbs': {"t": 123, "s": 123, "d": 
[]}} + return { 'splits': [], 'since': 1234, 'till': 12345 } self.change_number_3 = change_number self.fetch_options_3 = fetch_options - return { 'ff': { 'd': [], 's': 12345, 't': 12345 }, - 'rbs': {"t": 123, "s": 123, "d": []}} - + return { 'splits': [], 'since': 12345, 'till': 12345 } get_changes.called = 0 api.fetch_splits = get_changes @@ -713,7 +544,7 @@ def intersect(sets): storage.flag_set_filter.flag_sets = {} storage.flag_set_filter.sorted_flag_sets = [] - split_synchronizer = SplitSynchronizerAsync(api, storage, rbs_storage) + split_synchronizer = SplitSynchronizerAsync(api, storage) split_synchronizer._backoff = Backoff(1, 1) await split_synchronizer.synchronize_splits() @@ -723,7 +554,7 @@ def intersect(sets): split_synchronizer._backoff = Backoff(1, 0.1) await split_synchronizer.synchronize_splits(12345) assert (12345, True, 1234) == (self.change_number_3, self.fetch_options_3.cache_control_headers, self.fetch_options_3.change_number) - assert get_changes.called == 10 # 2 ok + BACKOFF(2 since==till + 2 re-attempts) + CDN(2 since==till) + assert get_changes.called == 8 # 2 ok + BACKOFF(2 since==till + 2 re-attempts) + CDN(2 since==till) inserted_split = self.parsed_split[0] assert isinstance(inserted_split, Split) @@ -733,8 +564,7 @@ def intersect(sets): async def test_sync_flag_sets_with_config_sets(self, mocker): """Test split sync with flag sets.""" storage = InMemorySplitStorageAsync(['set1', 'set2']) - rbs_storage = InMemoryRuleBasedSegmentStorageAsync() - + split = self.splits[0].copy() split['name'] = 'second' splits1 = [self.splits[0].copy(), split] @@ -745,25 +575,20 @@ async def test_sync_flag_sets_with_config_sets(self, mocker): async def get_changes(*args, **kwargs): get_changes.called += 1 if get_changes.called == 1: - return { 'ff': { 'd': splits1, 's': 123, 't': 123 }, - 'rbs': {'t': 123, 's': 123, 'd': []}} + return { 'splits': splits1, 'since': 123, 'till': 123 } elif get_changes.called == 2: splits2[0]['sets'] = ['set3'] - return { 
'ff': { 'd': splits2, 's': 124, 't': 124 }, - 'rbs': {'t': 124, 's': 124, 'd': []}} + return { 'splits': splits2, 'since': 124, 'till': 124 } elif get_changes.called == 3: splits3[0]['sets'] = ['set1'] - return { 'ff': { 'd': splits3, 's': 12434, 't': 12434 }, - 'rbs': {'t': 12434, 's': 12434, 'd': []}} + return { 'splits': splits3, 'since': 12434, 'till': 12434 } splits4[0]['sets'] = ['set6'] splits4[0]['name'] = 'new_split' - return { 'ff': { 'd': splits4, 's': 12438, 't': 12438 }, - 'rbs': {'t': 12438, 's': 12438, 'd': []}} - + return { 'splits': splits4, 'since': 12438, 'till': 12438 } get_changes.called = 0 api.fetch_splits = get_changes - split_synchronizer = SplitSynchronizerAsync(api, storage, rbs_storage) + split_synchronizer = SplitSynchronizerAsync(api, storage) split_synchronizer._backoff = Backoff(1, 1) await split_synchronizer.synchronize_splits() assert isinstance(await storage.get('some_name'), Split) @@ -781,7 +606,7 @@ async def get_changes(*args, **kwargs): async def test_sync_flag_sets_without_config_sets(self, mocker): """Test split sync with flag sets.""" storage = InMemorySplitStorageAsync() - rbs_storage = InMemoryRuleBasedSegmentStorageAsync() + split = self.splits[0].copy() split['name'] = 'second' splits1 = [self.splits[0].copy(), split] @@ -792,24 +617,20 @@ async def test_sync_flag_sets_without_config_sets(self, mocker): async def get_changes(*args, **kwargs): get_changes.called += 1 if get_changes.called == 1: - return { 'ff': { 'd': splits1, 's': 123, 't': 123 }, - 'rbs': {"t": 123, "s": 123, "d": []}} + return { 'splits': splits1, 'since': 123, 'till': 123 } elif get_changes.called == 2: splits2[0]['sets'] = ['set3'] - return { 'ff': { 'd': splits2, 's': 124, 't': 124 }, - 'rbs': {"t": 124, "s": 124, "d": []}} + return { 'splits': splits2, 'since': 124, 'till': 124 } elif get_changes.called == 3: splits3[0]['sets'] = ['set1'] - return { 'ff': { 'd': splits3, 's': 12434, 't': 12434 }, - 'rbs': {"t": 12434, "s": 12434, "d": []}} + 
return { 'splits': splits3, 'since': 12434, 'till': 12434 } splits4[0]['sets'] = ['set6'] splits4[0]['name'] = 'third_split' - return { 'ff': { 'd': splits4, 's': 12438, 't': 12438 }, - 'rbs': {"t": 12438, "s": 12438, "d": []}} + return { 'splits': splits4, 'since': 12438, 'till': 12438 } get_changes.called = 0 api.fetch_splits.side_effect = get_changes - split_synchronizer = SplitSynchronizerAsync(api, storage, rbs_storage) + split_synchronizer = SplitSynchronizerAsync(api, storage) split_synchronizer._backoff = Backoff(1, 1) await split_synchronizer.synchronize_splits() assert isinstance(await storage.get('new_split'), Split) From 58d5ddda54f0556731664adf3b9e925f47943fdc Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Mon, 10 Mar 2025 20:23:15 -0300 Subject: [PATCH 09/56] Update sync and api classes --- splitio/api/commons.py | 20 +- splitio/api/splits.py | 14 +- splitio/sync/split.py | 103 +++--- tests/api/test_segments_api.py | 14 +- tests/api/test_splits_api.py | 28 +- tests/sync/test_splits_synchronizer.py | 431 +++++++++++++++++-------- 6 files changed, 417 insertions(+), 193 deletions(-) diff --git a/splitio/api/commons.py b/splitio/api/commons.py index 2ca75595..9dda1ee0 100644 --- a/splitio/api/commons.py +++ b/splitio/api/commons.py @@ -57,7 +57,7 @@ def record_telemetry(status_code, elapsed, metric_name, telemetry_runtime_produc class FetchOptions(object): """Fetch Options object.""" - def __init__(self, cache_control_headers=False, change_number=None, sets=None, spec=SPEC_VERSION): + def __init__(self, cache_control_headers=False, change_number=None, rbs_change_number=None, sets=None, spec=SPEC_VERSION): """ Class constructor. 
@@ -72,6 +72,7 @@ def __init__(self, cache_control_headers=False, change_number=None, sets=None, s """ self._cache_control_headers = cache_control_headers self._change_number = change_number + self._rbs_change_number = rbs_change_number self._sets = sets self._spec = spec @@ -85,6 +86,11 @@ def change_number(self): """Return change number.""" return self._change_number + @property + def rbs_change_number(self): + """Return change number.""" + return self._rbs_change_number + @property def sets(self): """Return sets.""" @@ -103,14 +109,19 @@ def __eq__(self, other): if self._change_number != other._change_number: return False + if self._rbs_change_number != other._rbs_change_number: + return False + if self._sets != other._sets: return False + if self._spec != other._spec: return False + return True -def build_fetch(change_number, fetch_options, metadata): +def build_fetch(change_number, fetch_options, metadata, rbs_change_number=None): """ Build fetch with new flags if that is the case. @@ -123,11 +134,16 @@ def build_fetch(change_number, fetch_options, metadata): :param metadata: Metadata Headers. :type metadata: dict + :param rbs_change_number: Last known timestamp of a rule based segment modification. 
+ :type rbs_change_number: int + :return: Objects for fetch :rtype: dict, dict """ query = {'s': fetch_options.spec} if fetch_options.spec is not None else {} query['since'] = change_number + if rbs_change_number is not None: + query['rbSince'] = rbs_change_number extra_headers = metadata if fetch_options is None: return query, extra_headers diff --git a/splitio/api/splits.py b/splitio/api/splits.py index 692fde3b..f013497a 100644 --- a/splitio/api/splits.py +++ b/splitio/api/splits.py @@ -31,13 +31,16 @@ def __init__(self, client, sdk_key, sdk_metadata, telemetry_runtime_producer): self._telemetry_runtime_producer = telemetry_runtime_producer self._client.set_telemetry_data(HTTPExceptionsAndLatencies.SPLIT, self._telemetry_runtime_producer) - def fetch_splits(self, change_number, fetch_options): + def fetch_splits(self, change_number, rbs_change_number, fetch_options): """ Fetch feature flags from backend. :param change_number: Last known timestamp of a split modification. :type change_number: int + :param rbs_change_number: Last known timestamp of a rule based segment modification. + :type rbs_change_number: int + :param fetch_options: Fetch options for getting feature flag definitions. 
:type fetch_options: splitio.api.commons.FetchOptions @@ -45,7 +48,7 @@ def fetch_splits(self, change_number, fetch_options): :rtype: dict """ try: - query, extra_headers = build_fetch(change_number, fetch_options, self._metadata) + query, extra_headers = build_fetch(change_number, fetch_options, self._metadata, rbs_change_number) response = self._client.get( 'sdk', 'splitChanges', @@ -86,12 +89,15 @@ def __init__(self, client, sdk_key, sdk_metadata, telemetry_runtime_producer): self._telemetry_runtime_producer = telemetry_runtime_producer self._client.set_telemetry_data(HTTPExceptionsAndLatencies.SPLIT, self._telemetry_runtime_producer) - async def fetch_splits(self, change_number, fetch_options): + async def fetch_splits(self, change_number, rbs_change_number, fetch_options): """ Fetch feature flags from backend. :param change_number: Last known timestamp of a split modification. :type change_number: int + + :param rbs_change_number: Last known timestamp of a rule based segment modification. + :type rbs_change_number: int :param fetch_options: Fetch options for getting feature flag definitions. 
:type fetch_options: splitio.api.commons.FetchOptions @@ -100,7 +106,7 @@ async def fetch_splits(self, change_number, fetch_options): :rtype: dict """ try: - query, extra_headers = build_fetch(change_number, fetch_options, self._metadata) + query, extra_headers = build_fetch(change_number, fetch_options, self._metadata, rbs_change_number) response = await self._client.get( 'sdk', 'splitChanges', diff --git a/splitio/sync/split.py b/splitio/sync/split.py index 7bb13117..e24a21a0 100644 --- a/splitio/sync/split.py +++ b/splitio/sync/split.py @@ -10,10 +10,11 @@ from splitio.api import APIException, APIUriException from splitio.api.commons import FetchOptions from splitio.client.input_validator import validate_flag_sets -from splitio.models import splits +from splitio.models import splits, rule_based_segments from splitio.util.backoff import Backoff from splitio.util.time import get_current_epoch_time_ms -from splitio.util.storage_helper import update_feature_flag_storage, update_feature_flag_storage_async +from splitio.util.storage_helper import update_feature_flag_storage, update_feature_flag_storage_async, \ + update_rule_based_segment_storage, update_rule_based_segment_storage_async from splitio.sync import util from splitio.optional.loaders import asyncio, aiofiles @@ -32,7 +33,7 @@ class SplitSynchronizerBase(object): """Feature Flag changes synchronizer.""" - def __init__(self, feature_flag_api, feature_flag_storage): + def __init__(self, feature_flag_api, feature_flag_storage, rule_based_segment_storage): """ Class constructor. 
@@ -44,6 +45,7 @@ def __init__(self, feature_flag_api, feature_flag_storage): """ self._api = feature_flag_api self._feature_flag_storage = feature_flag_storage + self._rule_based_segment_storage = rule_based_segment_storage self._backoff = Backoff( _ON_DEMAND_FETCH_BACKOFF_BASE, _ON_DEMAND_FETCH_BACKOFF_MAX_WAIT) @@ -53,6 +55,11 @@ def feature_flag_storage(self): """Return Feature_flag storage object""" return self._feature_flag_storage + @property + def rule_based_segment_storage(self): + """Return rule base segment storage object""" + return self._rule_based_segment_storage + def _get_config_sets(self): """ Get all filter flag sets cnverrted to string, if no filter flagsets exist return None @@ -67,7 +74,7 @@ def _get_config_sets(self): class SplitSynchronizer(SplitSynchronizerBase): """Feature Flag changes synchronizer.""" - def __init__(self, feature_flag_api, feature_flag_storage): + def __init__(self, feature_flag_api, feature_flag_storage, rule_based_segment_storage): """ Class constructor. @@ -77,7 +84,7 @@ def __init__(self, feature_flag_api, feature_flag_storage): :param feature_flag_storage: Feature Flag Storage. 
:type feature_flag_storage: splitio.storage.InMemorySplitStorage """ - SplitSynchronizerBase.__init__(self, feature_flag_api, feature_flag_storage) + SplitSynchronizerBase.__init__(self, feature_flag_api, feature_flag_storage, rule_based_segment_storage) def _fetch_until(self, fetch_options, till=None): """ @@ -97,12 +104,17 @@ def _fetch_until(self, fetch_options, till=None): change_number = self._feature_flag_storage.get_change_number() if change_number is None: change_number = -1 - if till is not None and till < change_number: + + rbs_change_number = self._rule_based_segment_storage.get_change_number() + if rbs_change_number is None: + rbs_change_number = -1 + + if till is not None and till < change_number and till < rbs_change_number: # the passed till is less than change_number, no need to perform updates - return change_number, segment_list + return change_number, rbs_change_number, segment_list try: - feature_flag_changes = self._api.fetch_splits(change_number, fetch_options) + feature_flag_changes = self._api.fetch_splits(change_number, rbs_change_number, fetch_options) except APIException as exc: if exc._status_code is not None and exc._status_code == 414: _LOGGER.error('Exception caught: the amount of flag sets provided are big causing uri length error.') @@ -112,15 +124,16 @@ def _fetch_until(self, fetch_options, till=None): _LOGGER.error('Exception raised while fetching feature flags') _LOGGER.debug('Exception information: ', exc_info=True) raise exc - fetched_feature_flags = [(splits.from_raw(feature_flag)) for feature_flag in feature_flag_changes.get('splits', [])] - segment_list = update_feature_flag_storage(self._feature_flag_storage, fetched_feature_flags, feature_flag_changes['till']) - if feature_flag_changes['till'] == feature_flag_changes['since']: - return feature_flag_changes['till'], segment_list - - fetched_feature_flags = [(splits.from_raw(feature_flag)) for feature_flag in feature_flag_changes.get('splits', [])] - segment_list = 
update_feature_flag_storage(self._feature_flag_storage, fetched_feature_flags, feature_flag_changes['till']) - if feature_flag_changes['till'] == feature_flag_changes['since']: - return feature_flag_changes['till'], segment_list + + fetched_rule_based_segments = [(rule_based_segments.from_raw(rule_based_segment)) for rule_based_segment in feature_flag_changes.get('rbs').get('d', [])] + rbs_segment_list = update_rule_based_segment_storage(self._rule_based_segment_storage, fetched_rule_based_segments, feature_flag_changes.get('rbs')['t']) + + fetched_feature_flags = [(splits.from_raw(feature_flag)) for feature_flag in feature_flag_changes.get('ff').get('d', [])] + segment_list = update_feature_flag_storage(self._feature_flag_storage, fetched_feature_flags, feature_flag_changes.get('ff')['t']) + segment_list.update(rbs_segment_list) + + if feature_flag_changes.get('ff')['t'] == feature_flag_changes.get('ff')['s'] and feature_flag_changes.get('rbs')['t'] == feature_flag_changes.get('rbs')['s']: + return feature_flag_changes.get('ff')['t'], feature_flag_changes.get('rbs')['t'], segment_list def _attempt_feature_flag_sync(self, fetch_options, till=None): """ @@ -140,13 +153,13 @@ def _attempt_feature_flag_sync(self, fetch_options, till=None): remaining_attempts = _ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES while True: remaining_attempts -= 1 - change_number, segment_list = self._fetch_until(fetch_options, till) + change_number, rbs_change_number, segment_list = self._fetch_until(fetch_options, till) final_segment_list.update(segment_list) - if till is None or till <= change_number: - return True, remaining_attempts, change_number, final_segment_list + if till is None or (till <= change_number and till <= rbs_change_number): + return True, remaining_attempts, change_number, rbs_change_number, final_segment_list elif remaining_attempts <= 0: - return False, remaining_attempts, change_number, final_segment_list + return False, remaining_attempts, change_number, rbs_change_number, 
final_segment_list how_long = self._backoff.get() time.sleep(how_long) @@ -172,7 +185,7 @@ def synchronize_splits(self, till=None): """ final_segment_list = set() fetch_options = FetchOptions(True, sets=self._get_config_sets()) # Set Cache-Control to no-cache - successful_sync, remaining_attempts, change_number, segment_list = self._attempt_feature_flag_sync(fetch_options, + successful_sync, remaining_attempts, change_number, rbs_change_number, segment_list = self._attempt_feature_flag_sync(fetch_options, till) final_segment_list.update(segment_list) attempts = _ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES - remaining_attempts @@ -180,8 +193,8 @@ def synchronize_splits(self, till=None): _LOGGER.debug('Refresh completed in %d attempts.', attempts) return final_segment_list - with_cdn_bypass = FetchOptions(True, change_number, sets=self._get_config_sets()) # Set flag for bypassing CDN - without_cdn_successful_sync, remaining_attempts, change_number, segment_list = self._attempt_feature_flag_sync(with_cdn_bypass, till) + with_cdn_bypass = FetchOptions(True, change_number, rbs_change_number, sets=self._get_config_sets()) # Set flag for bypassing CDN + without_cdn_successful_sync, remaining_attempts, change_number, rbs_change_number, segment_list = self._attempt_feature_flag_sync(with_cdn_bypass, till) final_segment_list.update(segment_list) without_cdn_attempts = _ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES - remaining_attempts if without_cdn_successful_sync: @@ -208,7 +221,7 @@ def kill_split(self, feature_flag_name, default_treatment, change_number): class SplitSynchronizerAsync(SplitSynchronizerBase): """Feature Flag changes synchronizer async.""" - def __init__(self, feature_flag_api, feature_flag_storage): + def __init__(self, feature_flag_api, feature_flag_storage, rule_based_segment_storage): """ Class constructor. @@ -218,7 +231,7 @@ def __init__(self, feature_flag_api, feature_flag_storage): :param feature_flag_storage: Feature Flag Storage. 
:type feature_flag_storage: splitio.storage.InMemorySplitStorage """ - SplitSynchronizerBase.__init__(self, feature_flag_api, feature_flag_storage) + SplitSynchronizerBase.__init__(self, feature_flag_api, feature_flag_storage, rule_based_segment_storage) async def _fetch_until(self, fetch_options, till=None): """ @@ -238,12 +251,17 @@ async def _fetch_until(self, fetch_options, till=None): change_number = await self._feature_flag_storage.get_change_number() if change_number is None: change_number = -1 - if till is not None and till < change_number: + + rbs_change_number = await self._rule_based_segment_storage.get_change_number() + if rbs_change_number is None: + rbs_change_number = -1 + + if till is not None and till < change_number and till < rbs_change_number: # the passed till is less than change_number, no need to perform updates - return change_number, segment_list + return change_number, rbs_change_number, segment_list try: - feature_flag_changes = await self._api.fetch_splits(change_number, fetch_options) + feature_flag_changes = await self._api.fetch_splits(change_number, rbs_change_number, fetch_options) except APIException as exc: if exc._status_code is not None and exc._status_code == 414: _LOGGER.error('Exception caught: the amount of flag sets provided are big causing uri length error.') @@ -254,10 +272,15 @@ async def _fetch_until(self, fetch_options, till=None): _LOGGER.debug('Exception information: ', exc_info=True) raise exc - fetched_feature_flags = [(splits.from_raw(feature_flag)) for feature_flag in feature_flag_changes.get('splits', [])] - segment_list = await update_feature_flag_storage_async(self._feature_flag_storage, fetched_feature_flags, feature_flag_changes['till']) - if feature_flag_changes['till'] == feature_flag_changes['since']: - return feature_flag_changes['till'], segment_list + fetched_rule_based_segments = [(rule_based_segments.from_raw(rule_based_segment)) for rule_based_segment in feature_flag_changes.get('rbs').get('d', [])] 
+ rbs_segment_list = await update_rule_based_segment_storage_async(self._rule_based_segment_storage, fetched_rule_based_segments, feature_flag_changes.get('rbs')['t']) + + fetched_feature_flags = [(splits.from_raw(feature_flag)) for feature_flag in feature_flag_changes.get('ff').get('d', [])] + segment_list = await update_feature_flag_storage_async(self._feature_flag_storage, fetched_feature_flags, feature_flag_changes.get('ff')['t']) + segment_list.update(rbs_segment_list) + + if feature_flag_changes.get('ff')['t'] == feature_flag_changes.get('ff')['s'] and feature_flag_changes.get('rbs')['t'] == feature_flag_changes.get('rbs')['s']: + return feature_flag_changes.get('ff')['t'], feature_flag_changes.get('rbs')['t'], segment_list async def _attempt_feature_flag_sync(self, fetch_options, till=None): """ @@ -277,13 +300,13 @@ async def _attempt_feature_flag_sync(self, fetch_options, till=None): remaining_attempts = _ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES while True: remaining_attempts -= 1 - change_number, segment_list = await self._fetch_until(fetch_options, till) + change_number, rbs_change_number, segment_list = await self._fetch_until(fetch_options, till) final_segment_list.update(segment_list) - if till is None or till <= change_number: - return True, remaining_attempts, change_number, final_segment_list + if till is None or (till <= change_number and till <= rbs_change_number): + return True, remaining_attempts, change_number, rbs_change_number, final_segment_list elif remaining_attempts <= 0: - return False, remaining_attempts, change_number, final_segment_list + return False, remaining_attempts, change_number, rbs_change_number, final_segment_list how_long = self._backoff.get() await asyncio.sleep(how_long) @@ -297,7 +320,7 @@ async def synchronize_splits(self, till=None): """ final_segment_list = set() fetch_options = FetchOptions(True, sets=self._get_config_sets()) # Set Cache-Control to no-cache - successful_sync, remaining_attempts, change_number, 
segment_list = await self._attempt_feature_flag_sync(fetch_options, + successful_sync, remaining_attempts, change_number, rbs_change_number, segment_list = await self._attempt_feature_flag_sync(fetch_options, till) final_segment_list.update(segment_list) attempts = _ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES - remaining_attempts @@ -305,8 +328,8 @@ async def synchronize_splits(self, till=None): _LOGGER.debug('Refresh completed in %d attempts.', attempts) return final_segment_list - with_cdn_bypass = FetchOptions(True, change_number, sets=self._get_config_sets()) # Set flag for bypassing CDN - without_cdn_successful_sync, remaining_attempts, change_number, segment_list = await self._attempt_feature_flag_sync(with_cdn_bypass, till) + with_cdn_bypass = FetchOptions(True, change_number, rbs_change_number, sets=self._get_config_sets()) # Set flag for bypassing CDN + without_cdn_successful_sync, remaining_attempts, change_number, rbs_change_number, segment_list = await self._attempt_feature_flag_sync(with_cdn_bypass, till) final_segment_list.update(segment_list) without_cdn_attempts = _ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES - remaining_attempts if without_cdn_successful_sync: diff --git a/tests/api/test_segments_api.py b/tests/api/test_segments_api.py index 73e3efe7..8681be59 100644 --- a/tests/api/test_segments_api.py +++ b/tests/api/test_segments_api.py @@ -16,7 +16,7 @@ def test_fetch_segment_changes(self, mocker): httpclient.get.return_value = client.HttpResponse(200, '{"prop1": "value1"}', {}) segment_api = segments.SegmentsAPI(httpclient, 'some_api_key', SdkMetadata('1.0', 'some', '1.2.3.4'), mocker.Mock()) - response = segment_api.fetch_segment('some_segment', 123, FetchOptions(None, None, None, None)) + response = segment_api.fetch_segment('some_segment', 123, FetchOptions(None, None, None, None, None)) assert response['prop1'] == 'value1' assert httpclient.get.mock_calls == [mocker.call('sdk', 'segmentChanges/some_segment', 'some_api_key', extra_headers={ @@ -27,7 +27,7 
@@ def test_fetch_segment_changes(self, mocker): query={'since': 123})] httpclient.reset_mock() - response = segment_api.fetch_segment('some_segment', 123, FetchOptions(True, None, None, None)) + response = segment_api.fetch_segment('some_segment', 123, FetchOptions(True, None, None, None, None)) assert response['prop1'] == 'value1' assert httpclient.get.mock_calls == [mocker.call('sdk', 'segmentChanges/some_segment', 'some_api_key', extra_headers={ @@ -39,7 +39,7 @@ def test_fetch_segment_changes(self, mocker): query={'since': 123})] httpclient.reset_mock() - response = segment_api.fetch_segment('some_segment', 123, FetchOptions(True, 123, None, None)) + response = segment_api.fetch_segment('some_segment', 123, FetchOptions(True, 123, None, None, None)) assert response['prop1'] == 'value1' assert httpclient.get.mock_calls == [mocker.call('sdk', 'segmentChanges/some_segment', 'some_api_key', extra_headers={ @@ -83,7 +83,7 @@ async def get(verb, url, key, query, extra_headers): return client.HttpResponse(200, '{"prop1": "value1"}', {}) httpclient.get = get - response = await segment_api.fetch_segment('some_segment', 123, FetchOptions(None, None, None, None)) + response = await segment_api.fetch_segment('some_segment', 123, FetchOptions(None, None, None, None, None)) assert response['prop1'] == 'value1' assert self.verb == 'sdk' assert self.url == 'segmentChanges/some_segment' @@ -96,7 +96,7 @@ async def get(verb, url, key, query, extra_headers): assert self.query == {'since': 123} httpclient.reset_mock() - response = await segment_api.fetch_segment('some_segment', 123, FetchOptions(True, None, None, None)) + response = await segment_api.fetch_segment('some_segment', 123, FetchOptions(True, None, None, None, None)) assert response['prop1'] == 'value1' assert self.verb == 'sdk' assert self.url == 'segmentChanges/some_segment' @@ -110,7 +110,7 @@ async def get(verb, url, key, query, extra_headers): assert self.query == {'since': 123} httpclient.reset_mock() - response 
= await segment_api.fetch_segment('some_segment', 123, FetchOptions(True, 123, None, None)) + response = await segment_api.fetch_segment('some_segment', 123, FetchOptions(True, 123, None, None, None)) assert response['prop1'] == 'value1' assert self.verb == 'sdk' assert self.url == 'segmentChanges/some_segment' @@ -128,6 +128,6 @@ def raise_exception(*args, **kwargs): raise client.HttpClientException('some_message') httpclient.get = raise_exception with pytest.raises(APIException) as exc_info: - response = await segment_api.fetch_segment('some_segment', 123, FetchOptions(None, None, None, None)) + response = await segment_api.fetch_segment('some_segment', 123, FetchOptions(None, None, None, None, None)) assert exc_info.type == APIException assert exc_info.value.message == 'some_message' diff --git a/tests/api/test_splits_api.py b/tests/api/test_splits_api.py index d1d276b7..1826ec23 100644 --- a/tests/api/test_splits_api.py +++ b/tests/api/test_splits_api.py @@ -16,7 +16,7 @@ def test_fetch_split_changes(self, mocker): httpclient.get.return_value = client.HttpResponse(200, '{"prop1": "value1"}', {}) split_api = splits.SplitsAPI(httpclient, 'some_api_key', SdkMetadata('1.0', 'some', '1.2.3.4'), mocker.Mock()) - response = split_api.fetch_splits(123, FetchOptions(False, None, 'set1,set2')) + response = split_api.fetch_splits(123, -1, FetchOptions(False, None, None, 'set1,set2')) assert response['prop1'] == 'value1' assert httpclient.get.mock_calls == [mocker.call('sdk', 'splitChanges', 'some_api_key', extra_headers={ @@ -24,10 +24,10 @@ def test_fetch_split_changes(self, mocker): 'SplitSDKMachineIP': '1.2.3.4', 'SplitSDKMachineName': 'some' }, - query={'s': '1.1', 'since': 123, 'sets': 'set1,set2'})] + query={'s': '1.1', 'since': 123, 'rbSince': -1, 'sets': 'set1,set2'})] httpclient.reset_mock() - response = split_api.fetch_splits(123, FetchOptions(True, 123, 'set3')) + response = split_api.fetch_splits(123, 1, FetchOptions(True, 123, None,'set3')) assert 
response['prop1'] == 'value1' assert httpclient.get.mock_calls == [mocker.call('sdk', 'splitChanges', 'some_api_key', extra_headers={ @@ -36,10 +36,10 @@ def test_fetch_split_changes(self, mocker): 'SplitSDKMachineName': 'some', 'Cache-Control': 'no-cache' }, - query={'s': '1.1', 'since': 123, 'till': 123, 'sets': 'set3'})] + query={'s': '1.1', 'since': 123, 'rbSince': 1, 'till': 123, 'sets': 'set3'})] httpclient.reset_mock() - response = split_api.fetch_splits(123, FetchOptions(True, 123, 'set3')) + response = split_api.fetch_splits(123, 122, FetchOptions(True, 123, None, 'set3')) assert response['prop1'] == 'value1' assert httpclient.get.mock_calls == [mocker.call('sdk', 'splitChanges', 'some_api_key', extra_headers={ @@ -48,14 +48,14 @@ def test_fetch_split_changes(self, mocker): 'SplitSDKMachineName': 'some', 'Cache-Control': 'no-cache' }, - query={'s': '1.1', 'since': 123, 'till': 123, 'sets': 'set3'})] + query={'s': '1.1', 'since': 123, 'rbSince': 122, 'till': 123, 'sets': 'set3'})] httpclient.reset_mock() def raise_exception(*args, **kwargs): raise client.HttpClientException('some_message') httpclient.get.side_effect = raise_exception with pytest.raises(APIException) as exc_info: - response = split_api.fetch_splits(123, FetchOptions()) + response = split_api.fetch_splits(123, 12, FetchOptions()) assert exc_info.type == APIException assert exc_info.value.message == 'some_message' @@ -82,7 +82,7 @@ async def get(verb, url, key, query, extra_headers): return client.HttpResponse(200, '{"prop1": "value1"}', {}) httpclient.get = get - response = await split_api.fetch_splits(123, FetchOptions(False, None, 'set1,set2')) + response = await split_api.fetch_splits(123, -1, FetchOptions(False, None, None, 'set1,set2')) assert response['prop1'] == 'value1' assert self.verb == 'sdk' assert self.url == 'splitChanges' @@ -92,10 +92,10 @@ async def get(verb, url, key, query, extra_headers): 'SplitSDKMachineIP': '1.2.3.4', 'SplitSDKMachineName': 'some' } - assert self.query 
== {'s': '1.1', 'since': 123, 'sets': 'set1,set2'} + assert self.query == {'s': '1.1', 'since': 123, 'rbSince': -1, 'sets': 'set1,set2'} httpclient.reset_mock() - response = await split_api.fetch_splits(123, FetchOptions(True, 123, 'set3')) + response = await split_api.fetch_splits(123, 1, FetchOptions(True, 123, None, 'set3')) assert response['prop1'] == 'value1' assert self.verb == 'sdk' assert self.url == 'splitChanges' @@ -106,10 +106,10 @@ async def get(verb, url, key, query, extra_headers): 'SplitSDKMachineName': 'some', 'Cache-Control': 'no-cache' } - assert self.query == {'s': '1.1', 'since': 123, 'till': 123, 'sets': 'set3'} + assert self.query == {'s': '1.1', 'since': 123, 'rbSince': 1, 'till': 123, 'sets': 'set3'} httpclient.reset_mock() - response = await split_api.fetch_splits(123, FetchOptions(True, 123)) + response = await split_api.fetch_splits(123, 122, FetchOptions(True, 123, None)) assert response['prop1'] == 'value1' assert self.verb == 'sdk' assert self.url == 'splitChanges' @@ -120,13 +120,13 @@ async def get(verb, url, key, query, extra_headers): 'SplitSDKMachineName': 'some', 'Cache-Control': 'no-cache' } - assert self.query == {'s': '1.1', 'since': 123, 'till': 123} + assert self.query == {'s': '1.1', 'since': 123, 'rbSince': 122, 'till': 123} httpclient.reset_mock() def raise_exception(*args, **kwargs): raise client.HttpClientException('some_message') httpclient.get = raise_exception with pytest.raises(APIException) as exc_info: - response = await split_api.fetch_splits(123, FetchOptions()) + response = await split_api.fetch_splits(123, 12, FetchOptions()) assert exc_info.type == APIException assert exc_info.value.message == 'some_message' diff --git a/tests/sync/test_splits_synchronizer.py b/tests/sync/test_splits_synchronizer.py index b5aafd51..470c2241 100644 --- a/tests/sync/test_splits_synchronizer.py +++ b/tests/sync/test_splits_synchronizer.py @@ -9,9 +9,10 @@ from splitio.api import APIException from splitio.api.commons import 
FetchOptions from splitio.storage import SplitStorage -from splitio.storage.inmemmory import InMemorySplitStorage, InMemorySplitStorageAsync +from splitio.storage.inmemmory import InMemorySplitStorage, InMemorySplitStorageAsync, InMemoryRuleBasedSegmentStorage, InMemoryRuleBasedSegmentStorageAsync from splitio.storage import FlagSetsFilter from splitio.models.splits import Split +from splitio.models.rule_based_segments import RuleBasedSegment from splitio.sync.split import SplitSynchronizer, SplitSynchronizerAsync, LocalSplitSynchronizer, LocalSplitSynchronizerAsync, LocalhostMode from splitio.optional.loaders import aiofiles, asyncio from tests.integration import splits_json @@ -52,42 +53,112 @@ 'sets': ['set1', 'set2'] }] -json_body = {'splits': [{ - 'changeNumber': 123, - 'trafficTypeName': 'user', - 'name': 'some_name', - 'trafficAllocation': 100, - 'trafficAllocationSeed': 123456, - 'seed': 321654, - 'status': 'ACTIVE', - 'killed': False, - 'defaultTreatment': 'off', - 'algo': 2, - 'conditions': [ - { - 'partitions': [ - {'treatment': 'on', 'size': 50}, - {'treatment': 'off', 'size': 50} - ], - 'contitionType': 'WHITELIST', - 'label': 'some_label', - 'matcherGroup': { - 'matchers': [ - { - 'matcherType': 'WHITELIST', - 'whitelistMatcherData': { - 'whitelist': ['k1', 'k2', 'k3'] - }, - 'negate': False, - } +json_body = { + "ff": { + "t":1675095324253, + "s":-1, + 'd': [{ + 'changeNumber': 123, + 'trafficTypeName': 'user', + 'name': 'some_name', + 'trafficAllocation': 100, + 'trafficAllocationSeed': 123456, + 'seed': 321654, + 'status': 'ACTIVE', + 'killed': False, + 'defaultTreatment': 'off', + 'algo': 2, + 'conditions': [ + { + 'partitions': [ + {'treatment': 'on', 'size': 50}, + {'treatment': 'off', 'size': 50} ], - 'combiner': 'AND' + 'contitionType': 'WHITELIST', + 'label': 'some_label', + 'matcherGroup': { + 'matchers': [ + { + 'matcherType': 'WHITELIST', + 'whitelistMatcherData': { + 'whitelist': ['k1', 'k2', 'k3'] + }, + 'negate': False, + } + ], + 
'combiner': 'AND' + } + }, + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user" + }, + "matcherType": "IN_RULE_BASED_SEGMENT", + "negate": False, + "userDefinedSegmentMatcherData": { + "segmentName": "sample_rule_based_segment" + } + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + }, + { + "treatment": "off", + "size": 0 + } + ], + "label": "in rule based segment sample_rule_based_segment" + }, + ], + 'sets': ['set1', 'set2']}] + }, + "rbs": { + "t": 1675095324253, + "s": -1, + "d": [ + { + "changeNumber": 5, + "name": "sample_rule_based_segment", + "status": "ACTIVE", + "trafficTypeName": "user", + "excluded":{ + "keys":["mauro@split.io","gaston@split.io"], + "segments":[] + }, + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": "email" + }, + "matcherType": "ENDS_WITH", + "negate": False, + "whitelistMatcherData": { + "whitelist": [ + "@split.io" + ] + } + } + ] } - } - ], - 'sets': ['set1', 'set2']}], - "till":1675095324253, - "since":-1, + } + ] + } + ] + } } class SplitsSynchronizerTests(object): @@ -98,13 +169,16 @@ class SplitsSynchronizerTests(object): def test_synchronize_splits_error(self, mocker): """Test that if fetching splits fails at some_point, the task will continue running.""" storage = mocker.Mock(spec=InMemorySplitStorage) + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) api = mocker.Mock() - def run(x, c): + def run(x, y, c): raise APIException("something broke") run._calls = 0 api.fetch_splits.side_effect = run storage.get_change_number.return_value = -1 + rbs_storage.get_change_number.return_value = -1 + class flag_set_filter(): def should_filter(): return False @@ -115,7 +189,7 @@ def intersect(sets): storage.flag_set_filter.flag_sets = {} storage.flag_set_filter.sorted_flag_sets = [] - split_synchronizer = SplitSynchronizer(api, 
storage) + split_synchronizer = SplitSynchronizer(api, storage, rbs_storage) with pytest.raises(APIException): split_synchronizer.synchronize_splits(1) @@ -123,21 +197,32 @@ def intersect(sets): def test_synchronize_splits(self, mocker): """Test split sync.""" storage = mocker.Mock(spec=InMemorySplitStorage) + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) def change_number_mock(): change_number_mock._calls += 1 if change_number_mock._calls == 1: return -1 return 123 + + def rbs_change_number_mock(): + rbs_change_number_mock._calls += 1 + if rbs_change_number_mock._calls == 1: + return -1 + return 123 + change_number_mock._calls = 0 + rbs_change_number_mock._calls = 0 storage.get_change_number.side_effect = change_number_mock - + rbs_storage.get_change_number.side_effect = rbs_change_number_mock + class flag_set_filter(): def should_filter(): return False def intersect(sets): return True + storage.flag_set_filter = flag_set_filter storage.flag_set_filter.flag_sets = {} storage.flag_set_filter.sorted_flag_sets = [] @@ -147,35 +232,46 @@ def get_changes(*args, **kwargs): get_changes.called += 1 if get_changes.called == 1: - return { - 'splits': self.splits, - 'since': -1, - 'till': 123 - } + return json_body else: return { - 'splits': [], - 'since': 123, - 'till': 123 + "ff": { + "t":123, + "s":123, + 'd': [] + }, + "rbs": { + "t": 5, + "s": 5, + "d": [] + } } + get_changes.called = 0 api.fetch_splits.side_effect = get_changes - split_synchronizer = SplitSynchronizer(api, storage) + split_synchronizer = SplitSynchronizer(api, storage, rbs_storage) split_synchronizer.synchronize_splits() - + assert api.fetch_splits.mock_calls[0][1][0] == -1 - assert api.fetch_splits.mock_calls[0][1][1].cache_control_headers == True + assert api.fetch_splits.mock_calls[0][1][2].cache_control_headers == True assert api.fetch_splits.mock_calls[1][1][0] == 123 - assert api.fetch_splits.mock_calls[1][1][1].cache_control_headers == True + assert 
api.fetch_splits.mock_calls[1][1][1] == 123 + assert api.fetch_splits.mock_calls[1][1][2].cache_control_headers == True inserted_split = storage.update.mock_calls[0][1][0][0] assert isinstance(inserted_split, Split) assert inserted_split.name == 'some_name' + inserted_rbs = rbs_storage.update.mock_calls[0][1][0][0] + assert isinstance(inserted_rbs, RuleBasedSegment) + assert inserted_rbs.name == 'sample_rule_based_segment' + def test_not_called_on_till(self, mocker): """Test that sync is not called when till is less than previous changenumber""" storage = mocker.Mock(spec=InMemorySplitStorage) + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) + class flag_set_filter(): def should_filter(): return False @@ -189,6 +285,7 @@ def intersect(sets): def change_number_mock(): return 2 storage.get_change_number.side_effect = change_number_mock + rbs_storage.get_change_number.side_effect = change_number_mock def get_changes(*args, **kwargs): get_changes.called += 1 @@ -199,7 +296,7 @@ def get_changes(*args, **kwargs): api = mocker.Mock() api.fetch_splits.side_effect = get_changes - split_synchronizer = SplitSynchronizer(api, storage) + split_synchronizer = SplitSynchronizer(api, storage, rbs_storage) split_synchronizer.synchronize_splits(1) assert get_changes.called == 0 @@ -209,6 +306,7 @@ def test_synchronize_splits_cdn(self, mocker): mocker.patch('splitio.sync.split._ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES', new=3) storage = mocker.Mock(spec=InMemorySplitStorage) + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) def change_number_mock(): change_number_mock._calls += 1 @@ -219,24 +317,39 @@ def change_number_mock(): elif change_number_mock._calls <= 7: return 1234 return 12345 # Return proper cn for CDN Bypass + + def rbs_change_number_mock(): + rbs_change_number_mock._calls += 1 + if rbs_change_number_mock._calls == 1: + return -1 + return 12345 # Return proper cn for CDN Bypass + change_number_mock._calls = 0 + rbs_change_number_mock._calls = 
0 storage.get_change_number.side_effect = change_number_mock + rbs_storage.get_change_number.side_effect = rbs_change_number_mock api = mocker.Mock() def get_changes(*args, **kwargs): get_changes.called += 1 if get_changes.called == 1: - return { 'splits': self.splits, 'since': -1, 'till': 123 } + return { 'ff': { 'd': self.splits, 's': -1, 't': 123 }, + 'rbs': {"t": 123, "s": -1, "d": []}} elif get_changes.called == 2: - return { 'splits': [], 'since': 123, 'till': 123 } + return { 'ff': { 'd': [], 's': 123, 't': 123 }, + 'rbs': {"t": 123, "s": 123, "d": []}} elif get_changes.called == 3: - return { 'splits': [], 'since': 123, 'till': 1234 } + return { 'ff': { 'd': [], 's': 123, 't': 1234 }, + 'rbs': {"t": 123, "s": 123, "d": []}} elif get_changes.called >= 4 and get_changes.called <= 6: - return { 'splits': [], 'since': 1234, 'till': 1234 } + return { 'ff': { 'd': [], 's': 1234, 't': 1234 }, + 'rbs': {"t": 123, "s": 123, "d": []}} elif get_changes.called == 7: - return { 'splits': [], 'since': 1234, 'till': 12345 } - return { 'splits': [], 'since': 12345, 'till': 12345 } + return { 'ff': { 'd': [], 's': 1234, 't': 12345 }, + 'rbs': {"t": 123, "s": 123, "d": []}} + return { 'ff': { 'd': [], 's': 12345, 't': 12345 }, + 'rbs': {"t": 123, "s": 123, "d": []}} get_changes.called = 0 api.fetch_splits.side_effect = get_changes @@ -251,20 +364,20 @@ def intersect(sets): storage.flag_set_filter.flag_sets = {} storage.flag_set_filter.sorted_flag_sets = [] - split_synchronizer = SplitSynchronizer(api, storage) + split_synchronizer = SplitSynchronizer(api, storage, rbs_storage) split_synchronizer._backoff = Backoff(1, 1) split_synchronizer.synchronize_splits() assert api.fetch_splits.mock_calls[0][1][0] == -1 - assert api.fetch_splits.mock_calls[0][1][1].cache_control_headers == True + assert api.fetch_splits.mock_calls[0][1][2].cache_control_headers == True assert api.fetch_splits.mock_calls[1][1][0] == 123 - assert api.fetch_splits.mock_calls[1][1][1].cache_control_headers 
== True + assert api.fetch_splits.mock_calls[1][1][2].cache_control_headers == True split_synchronizer._backoff = Backoff(1, 0.1) split_synchronizer.synchronize_splits(12345) assert api.fetch_splits.mock_calls[3][1][0] == 1234 - assert api.fetch_splits.mock_calls[3][1][1].cache_control_headers == True - assert len(api.fetch_splits.mock_calls) == 8 # 2 ok + BACKOFF(2 since==till + 2 re-attempts) + CDN(2 since==till) + assert api.fetch_splits.mock_calls[3][1][2].cache_control_headers == True + assert len(api.fetch_splits.mock_calls) == 10 # 2 ok + BACKOFF(2 since==till + 2 re-attempts) + CDN(2 since==till) inserted_split = storage.update.mock_calls[0][1][0][0] assert isinstance(inserted_split, Split) @@ -273,31 +386,36 @@ def intersect(sets): def test_sync_flag_sets_with_config_sets(self, mocker): """Test split sync with flag sets.""" storage = InMemorySplitStorage(['set1', 'set2']) - - split = self.splits[0].copy() + rbs_storage = InMemoryRuleBasedSegmentStorage() + + split = copy.deepcopy(self.splits[0]) split['name'] = 'second' splits1 = [self.splits[0].copy(), split] - splits2 = self.splits.copy() - splits3 = self.splits.copy() - splits4 = self.splits.copy() + splits2 = copy.deepcopy(self.splits) + splits3 = copy.deepcopy(self.splits) + splits4 = copy.deepcopy(self.splits) api = mocker.Mock() def get_changes(*args, **kwargs): get_changes.called += 1 if get_changes.called == 1: - return { 'splits': splits1, 'since': 123, 'till': 123 } + return { 'ff': { 'd': splits1, 's': 123, 't': 123 }, + 'rbs': {'t': 123, 's': 123, 'd': []}} elif get_changes.called == 2: splits2[0]['sets'] = ['set3'] - return { 'splits': splits2, 'since': 124, 'till': 124 } + return { 'ff': { 'd': splits2, 's': 124, 't': 124 }, + 'rbs': {'t': 124, 's': 124, 'd': []}} elif get_changes.called == 3: splits3[0]['sets'] = ['set1'] - return { 'splits': splits3, 'since': 12434, 'till': 12434 } + return { 'ff': { 'd': splits3, 's': 12434, 't': 12434 }, + 'rbs': {'t': 12434, 's': 12434, 'd': []}} 
splits4[0]['sets'] = ['set6'] splits4[0]['name'] = 'new_split' - return { 'splits': splits4, 'since': 12438, 'till': 12438 } + return { 'ff': { 'd': splits4, 's': 12438, 't': 12438 }, + 'rbs': {'t': 12438, 's': 12438, 'd': []}} get_changes.called = 0 api.fetch_splits.side_effect = get_changes - split_synchronizer = SplitSynchronizer(api, storage) + split_synchronizer = SplitSynchronizer(api, storage, rbs_storage) split_synchronizer._backoff = Backoff(1, 1) split_synchronizer.synchronize_splits() assert isinstance(storage.get('some_name'), Split) @@ -314,40 +432,44 @@ def get_changes(*args, **kwargs): def test_sync_flag_sets_without_config_sets(self, mocker): """Test split sync with flag sets.""" storage = InMemorySplitStorage() - - split = self.splits[0].copy() + rbs_storage = InMemoryRuleBasedSegmentStorage() + split = copy.deepcopy(self.splits[0]) split['name'] = 'second' splits1 = [self.splits[0].copy(), split] - splits2 = self.splits.copy() - splits3 = self.splits.copy() - splits4 = self.splits.copy() + splits2 = copy.deepcopy(self.splits) + splits3 = copy.deepcopy(self.splits) + splits4 = copy.deepcopy(self.splits) api = mocker.Mock() def get_changes(*args, **kwargs): get_changes.called += 1 if get_changes.called == 1: - return { 'splits': splits1, 'since': 123, 'till': 123 } + return { 'ff': { 'd': splits1, 's': 123, 't': 123 }, + 'rbs': {"t": 123, "s": 123, "d": []}} elif get_changes.called == 2: splits2[0]['sets'] = ['set3'] - return { 'splits': splits2, 'since': 124, 'till': 124 } + return { 'ff': { 'd': splits2, 's': 124, 't': 124 }, + 'rbs': {"t": 124, "s": 124, "d": []}} elif get_changes.called == 3: splits3[0]['sets'] = ['set1'] - return { 'splits': splits3, 'since': 12434, 'till': 12434 } + return { 'ff': { 'd': splits3, 's': 12434, 't': 12434 }, + 'rbs': {"t": 12434, "s": 12434, "d": []}} splits4[0]['sets'] = ['set6'] splits4[0]['name'] = 'third_split' - return { 'splits': splits4, 'since': 12438, 'till': 12438 } + return { 'ff': { 'd': splits4, 's': 
12438, 't': 12438 }, + 'rbs': {"t": 12438, "s": 12438, "d": []}} get_changes.called = 0 api.fetch_splits.side_effect = get_changes - split_synchronizer = SplitSynchronizer(api, storage) + split_synchronizer = SplitSynchronizer(api, storage, rbs_storage) split_synchronizer._backoff = Backoff(1, 1) split_synchronizer.synchronize_splits() - assert isinstance(storage.get('new_split'), Split) + assert isinstance(storage.get('some_name'), Split) split_synchronizer.synchronize_splits(124) - assert isinstance(storage.get('new_split'), Split) + assert isinstance(storage.get('some_name'), Split) split_synchronizer.synchronize_splits(12434) - assert isinstance(storage.get('new_split'), Split) + assert isinstance(storage.get('some_name'), Split) split_synchronizer.synchronize_splits(12438) assert isinstance(storage.get('third_split'), Split) @@ -361,17 +483,19 @@ class SplitsSynchronizerAsyncTests(object): async def test_synchronize_splits_error(self, mocker): """Test that if fetching splits fails at some_point, the task will continue running.""" storage = mocker.Mock(spec=InMemorySplitStorageAsync) + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) api = mocker.Mock() - async def run(x, c): + async def run(x, y, c): raise APIException("something broke") run._calls = 0 api.fetch_splits = run async def get_change_number(*args): return -1 - storage.get_change_number = get_change_number - + storage.get_change_number = get_change_number + rbs_storage.get_change_number = get_change_number + class flag_set_filter(): def should_filter(): return False @@ -382,7 +506,7 @@ def intersect(sets): storage.flag_set_filter.flag_sets = {} storage.flag_set_filter.sorted_flag_sets = [] - split_synchronizer = SplitSynchronizerAsync(api, storage) + split_synchronizer = SplitSynchronizerAsync(api, storage, rbs_storage) with pytest.raises(APIException): await split_synchronizer.synchronize_splits(1) @@ -391,15 +515,24 @@ def intersect(sets): async def test_synchronize_splits(self, 
mocker): """Test split sync.""" storage = mocker.Mock(spec=InMemorySplitStorageAsync) - + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) + async def change_number_mock(): change_number_mock._calls += 1 if change_number_mock._calls == 1: return -1 return 123 + async def rbs_change_number_mock(): + rbs_change_number_mock._calls += 1 + if rbs_change_number_mock._calls == 1: + return -1 + return 123 + change_number_mock._calls = 0 + rbs_change_number_mock._calls = 0 storage.get_change_number = change_number_mock - + rbs_storage.get_change_number.side_effect = rbs_change_number_mock + class flag_set_filter(): def should_filter(): return False @@ -416,33 +549,42 @@ async def update(parsed_split, deleted, chanhe_number): self.parsed_split = parsed_split storage.update = update + self.parsed_rbs = None + async def update(parsed_rbs, deleted, chanhe_number): + if len(parsed_rbs) > 0: + self.parsed_rbs = parsed_rbs + rbs_storage.update = update + api = mocker.Mock() self.change_number_1 = None self.fetch_options_1 = None self.change_number_2 = None self.fetch_options_2 = None - async def get_changes(change_number, fetch_options): + async def get_changes(change_number, rbs_change_number, fetch_options): get_changes.called += 1 if get_changes.called == 1: self.change_number_1 = change_number self.fetch_options_1 = fetch_options - return { - 'splits': self.splits, - 'since': -1, - 'till': 123 - } + return json_body else: self.change_number_2 = change_number self.fetch_options_2 = fetch_options return { - 'splits': [], - 'since': 123, - 'till': 123 + "ff": { + "t":123, + "s":123, + 'd': [] + }, + "rbs": { + "t": 123, + "s": 123, + "d": [] + } } get_changes.called = 0 api.fetch_splits = get_changes - split_synchronizer = SplitSynchronizerAsync(api, storage) + split_synchronizer = SplitSynchronizerAsync(api, storage, rbs_storage) await split_synchronizer.synchronize_splits() assert (-1, FetchOptions(True)._cache_control_headers) == (self.change_number_1, 
self.fetch_options_1._cache_control_headers) @@ -451,10 +593,17 @@ async def get_changes(change_number, fetch_options): assert isinstance(inserted_split, Split) assert inserted_split.name == 'some_name' + inserted_rbs = self.parsed_rbs[0] + assert isinstance(inserted_rbs, RuleBasedSegment) + assert inserted_rbs.name == 'sample_rule_based_segment' + + @pytest.mark.asyncio async def test_not_called_on_till(self, mocker): """Test that sync is not called when till is less than previous changenumber""" storage = mocker.Mock(spec=InMemorySplitStorageAsync) + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) + class flag_set_filter(): def should_filter(): return False @@ -468,7 +617,8 @@ def intersect(sets): async def change_number_mock(): return 2 storage.get_change_number = change_number_mock - + rbs_storage.get_change_number.side_effect = change_number_mock + async def get_changes(*args, **kwargs): get_changes.called += 1 return None @@ -476,7 +626,7 @@ async def get_changes(*args, **kwargs): api = mocker.Mock() api.fetch_splits = get_changes - split_synchronizer = SplitSynchronizerAsync(api, storage) + split_synchronizer = SplitSynchronizerAsync(api, storage, rbs_storage) await split_synchronizer.synchronize_splits(1) assert get_changes.called == 0 @@ -485,7 +635,7 @@ async def test_synchronize_splits_cdn(self, mocker): """Test split sync with bypassing cdn.""" mocker.patch('splitio.sync.split._ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES', new=3) storage = mocker.Mock(spec=InMemorySplitStorageAsync) - + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) async def change_number_mock(): change_number_mock._calls += 1 if change_number_mock._calls == 1: @@ -495,15 +645,27 @@ async def change_number_mock(): elif change_number_mock._calls <= 7: return 1234 return 12345 # Return proper cn for CDN Bypass + async def rbs_change_number_mock(): + rbs_change_number_mock._calls += 1 + if rbs_change_number_mock._calls == 1: + return -1 + return 12345 # Return 
proper cn for CDN Bypass + change_number_mock._calls = 0 + rbs_change_number_mock._calls = 0 storage.get_change_number = change_number_mock - + rbs_storage.get_change_number = rbs_change_number_mock + self.parsed_split = None async def update(parsed_split, deleted, change_number): if len(parsed_split) > 0: self.parsed_split = parsed_split storage.update = update + async def rbs_update(parsed, deleted, change_number): + pass + rbs_storage.update = rbs_update + api = mocker.Mock() self.change_number_1 = None self.fetch_options_1 = None @@ -511,25 +673,32 @@ async def update(parsed_split, deleted, change_number): self.fetch_options_2 = None self.change_number_3 = None self.fetch_options_3 = None - async def get_changes(change_number, fetch_options): + async def get_changes(change_number, rbs_change_number, fetch_options): get_changes.called += 1 if get_changes.called == 1: self.change_number_1 = change_number self.fetch_options_1 = fetch_options - return { 'splits': self.splits, 'since': -1, 'till': 123 } + return { 'ff': { 'd': self.splits, 's': -1, 't': 123 }, + 'rbs': {"t": 123, "s": -1, "d": []}} elif get_changes.called == 2: self.change_number_2 = change_number self.fetch_options_2 = fetch_options - return { 'splits': [], 'since': 123, 'till': 123 } + return { 'ff': { 'd': [], 's': 123, 't': 123 }, + 'rbs': {"t": 123, "s": 123, "d": []}} elif get_changes.called == 3: - return { 'splits': [], 'since': 123, 'till': 1234 } + return { 'ff': { 'd': [], 's': 123, 't': 1234 }, + 'rbs': {"t": 123, "s": 123, "d": []}} elif get_changes.called >= 4 and get_changes.called <= 6: - return { 'splits': [], 'since': 1234, 'till': 1234 } + return { 'ff': { 'd': [], 's': 1234, 't': 1234 }, + 'rbs': {"t": 123, "s": 123, "d": []}} elif get_changes.called == 7: - return { 'splits': [], 'since': 1234, 'till': 12345 } + return { 'ff': { 'd': [], 's': 1234, 't': 12345 }, + 'rbs': {"t": 123, "s": 123, "d": []}} self.change_number_3 = change_number self.fetch_options_3 = fetch_options - 
return { 'splits': [], 'since': 12345, 'till': 12345 } + return { 'ff': { 'd': [], 's': 12345, 't': 12345 }, + 'rbs': {"t": 123, "s": 123, "d": []}} + get_changes.called = 0 api.fetch_splits = get_changes @@ -544,7 +713,7 @@ def intersect(sets): storage.flag_set_filter.flag_sets = {} storage.flag_set_filter.sorted_flag_sets = [] - split_synchronizer = SplitSynchronizerAsync(api, storage) + split_synchronizer = SplitSynchronizerAsync(api, storage, rbs_storage) split_synchronizer._backoff = Backoff(1, 1) await split_synchronizer.synchronize_splits() @@ -554,7 +723,7 @@ def intersect(sets): split_synchronizer._backoff = Backoff(1, 0.1) await split_synchronizer.synchronize_splits(12345) assert (12345, True, 1234) == (self.change_number_3, self.fetch_options_3.cache_control_headers, self.fetch_options_3.change_number) - assert get_changes.called == 8 # 2 ok + BACKOFF(2 since==till + 2 re-attempts) + CDN(2 since==till) + assert get_changes.called == 10 # 2 ok + BACKOFF(2 since==till + 2 re-attempts) + CDN(2 since==till) inserted_split = self.parsed_split[0] assert isinstance(inserted_split, Split) @@ -564,7 +733,8 @@ def intersect(sets): async def test_sync_flag_sets_with_config_sets(self, mocker): """Test split sync with flag sets.""" storage = InMemorySplitStorageAsync(['set1', 'set2']) - + rbs_storage = InMemoryRuleBasedSegmentStorageAsync() + split = self.splits[0].copy() split['name'] = 'second' splits1 = [self.splits[0].copy(), split] @@ -575,20 +745,25 @@ async def test_sync_flag_sets_with_config_sets(self, mocker): async def get_changes(*args, **kwargs): get_changes.called += 1 if get_changes.called == 1: - return { 'splits': splits1, 'since': 123, 'till': 123 } + return { 'ff': { 'd': splits1, 's': 123, 't': 123 }, + 'rbs': {'t': 123, 's': 123, 'd': []}} elif get_changes.called == 2: splits2[0]['sets'] = ['set3'] - return { 'splits': splits2, 'since': 124, 'till': 124 } + return { 'ff': { 'd': splits2, 's': 124, 't': 124 }, + 'rbs': {'t': 124, 's': 124, 'd': 
[]}} elif get_changes.called == 3: splits3[0]['sets'] = ['set1'] - return { 'splits': splits3, 'since': 12434, 'till': 12434 } + return { 'ff': { 'd': splits3, 's': 12434, 't': 12434 }, + 'rbs': {'t': 12434, 's': 12434, 'd': []}} splits4[0]['sets'] = ['set6'] splits4[0]['name'] = 'new_split' - return { 'splits': splits4, 'since': 12438, 'till': 12438 } + return { 'ff': { 'd': splits4, 's': 12438, 't': 12438 }, + 'rbs': {'t': 12438, 's': 12438, 'd': []}} + get_changes.called = 0 api.fetch_splits = get_changes - split_synchronizer = SplitSynchronizerAsync(api, storage) + split_synchronizer = SplitSynchronizerAsync(api, storage, rbs_storage) split_synchronizer._backoff = Backoff(1, 1) await split_synchronizer.synchronize_splits() assert isinstance(await storage.get('some_name'), Split) @@ -606,7 +781,7 @@ async def get_changes(*args, **kwargs): async def test_sync_flag_sets_without_config_sets(self, mocker): """Test split sync with flag sets.""" storage = InMemorySplitStorageAsync() - + rbs_storage = InMemoryRuleBasedSegmentStorageAsync() split = self.splits[0].copy() split['name'] = 'second' splits1 = [self.splits[0].copy(), split] @@ -617,20 +792,24 @@ async def test_sync_flag_sets_without_config_sets(self, mocker): async def get_changes(*args, **kwargs): get_changes.called += 1 if get_changes.called == 1: - return { 'splits': splits1, 'since': 123, 'till': 123 } + return { 'ff': { 'd': splits1, 's': 123, 't': 123 }, + 'rbs': {"t": 123, "s": 123, "d": []}} elif get_changes.called == 2: splits2[0]['sets'] = ['set3'] - return { 'splits': splits2, 'since': 124, 'till': 124 } + return { 'ff': { 'd': splits2, 's': 124, 't': 124 }, + 'rbs': {"t": 124, "s": 124, "d": []}} elif get_changes.called == 3: splits3[0]['sets'] = ['set1'] - return { 'splits': splits3, 'since': 12434, 'till': 12434 } + return { 'ff': { 'd': splits3, 's': 12434, 't': 12434 }, + 'rbs': {"t": 12434, "s": 12434, "d": []}} splits4[0]['sets'] = ['set6'] splits4[0]['name'] = 'third_split' - return { 
'splits': splits4, 'since': 12438, 'till': 12438 } + return { 'ff': { 'd': splits4, 's': 12438, 't': 12438 }, + 'rbs': {"t": 12438, "s": 12438, "d": []}} get_changes.called = 0 api.fetch_splits.side_effect = get_changes - split_synchronizer = SplitSynchronizerAsync(api, storage) + split_synchronizer = SplitSynchronizerAsync(api, storage, rbs_storage) split_synchronizer._backoff = Backoff(1, 1) await split_synchronizer.synchronize_splits() assert isinstance(await storage.get('new_split'), Split) From 6611a43d98adef434693b271a9d88b5656506c96 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Tue, 11 Mar 2025 11:08:21 -0300 Subject: [PATCH 10/56] Update sync and tests --- setup.py | 5 ++-- splitio/sync/split.py | 32 +++++++++++------------ tests/sync/test_splits_synchronizer.py | 35 ++++++++++++++++++-------- 3 files changed, 44 insertions(+), 28 deletions(-) diff --git a/setup.py b/setup.py index 10fa308f..5e78817a 100644 --- a/setup.py +++ b/setup.py @@ -8,7 +8,7 @@ 'flake8', 'pytest==7.0.1', 'pytest-mock==3.11.1', - 'coverage', + 'coverage==7.0.0', 'pytest-cov==4.1.0', 'importlib-metadata==6.7', 'tomli==1.2.3', @@ -17,7 +17,8 @@ 'pytest-asyncio==0.21.0', 'aiohttp>=3.8.4', 'aiofiles>=23.1.0', - 'requests-kerberos>=0.15.0' + 'requests-kerberos>=0.15.0', + 'urllib3==2.2.0' ] INSTALL_REQUIRES = [ diff --git a/splitio/sync/split.py b/splitio/sync/split.py index e24a21a0..85f48417 100644 --- a/splitio/sync/split.py +++ b/splitio/sync/split.py @@ -86,7 +86,7 @@ def __init__(self, feature_flag_api, feature_flag_storage, rule_based_segment_st """ SplitSynchronizerBase.__init__(self, feature_flag_api, feature_flag_storage, rule_based_segment_storage) - def _fetch_until(self, fetch_options, till=None): + def _fetch_until(self, fetch_options, till=None, rbs_till=None): """ Hit endpoint, update storage and return when since==till. 
@@ -109,7 +109,7 @@ def _fetch_until(self, fetch_options, till=None): if rbs_change_number is None: rbs_change_number = -1 - if till is not None and till < change_number and till < rbs_change_number: + if (till is not None and till < change_number) or (rbs_till is not None and rbs_till < rbs_change_number): # the passed till is less than change_number, no need to perform updates return change_number, rbs_change_number, segment_list @@ -135,7 +135,7 @@ def _fetch_until(self, fetch_options, till=None): if feature_flag_changes.get('ff')['t'] == feature_flag_changes.get('ff')['s'] and feature_flag_changes.get('rbs')['t'] == feature_flag_changes.get('rbs')['s']: return feature_flag_changes.get('ff')['t'], feature_flag_changes.get('rbs')['t'], segment_list - def _attempt_feature_flag_sync(self, fetch_options, till=None): + def _attempt_feature_flag_sync(self, fetch_options, till=None, rbs_till=None): """ Hit endpoint, update storage and return True if sync is complete. @@ -153,9 +153,9 @@ def _attempt_feature_flag_sync(self, fetch_options, till=None): remaining_attempts = _ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES while True: remaining_attempts -= 1 - change_number, rbs_change_number, segment_list = self._fetch_until(fetch_options, till) + change_number, rbs_change_number, segment_list = self._fetch_until(fetch_options, till, rbs_till) final_segment_list.update(segment_list) - if till is None or (till <= change_number and till <= rbs_change_number): + if (till is None or till <= change_number) and (rbs_till is None or rbs_till <= rbs_change_number): return True, remaining_attempts, change_number, rbs_change_number, final_segment_list elif remaining_attempts <= 0: @@ -176,7 +176,7 @@ def _get_config_sets(self): return ','.join(self._feature_flag_storage.flag_set_filter.sorted_flag_sets) - def synchronize_splits(self, till=None): + def synchronize_splits(self, till=None, rbs_till=None): """ Hit endpoint, update storage and return True if sync is complete. 
@@ -186,7 +186,7 @@ def synchronize_splits(self, till=None): final_segment_list = set() fetch_options = FetchOptions(True, sets=self._get_config_sets()) # Set Cache-Control to no-cache successful_sync, remaining_attempts, change_number, rbs_change_number, segment_list = self._attempt_feature_flag_sync(fetch_options, - till) + till, rbs_till) final_segment_list.update(segment_list) attempts = _ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES - remaining_attempts if successful_sync: # succedeed sync @@ -194,7 +194,7 @@ def synchronize_splits(self, till=None): return final_segment_list with_cdn_bypass = FetchOptions(True, change_number, rbs_change_number, sets=self._get_config_sets()) # Set flag for bypassing CDN - without_cdn_successful_sync, remaining_attempts, change_number, rbs_change_number, segment_list = self._attempt_feature_flag_sync(with_cdn_bypass, till) + without_cdn_successful_sync, remaining_attempts, change_number, rbs_change_number, segment_list = self._attempt_feature_flag_sync(with_cdn_bypass, till, rbs_till) final_segment_list.update(segment_list) without_cdn_attempts = _ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES - remaining_attempts if without_cdn_successful_sync: @@ -233,7 +233,7 @@ def __init__(self, feature_flag_api, feature_flag_storage, rule_based_segment_st """ SplitSynchronizerBase.__init__(self, feature_flag_api, feature_flag_storage, rule_based_segment_storage) - async def _fetch_until(self, fetch_options, till=None): + async def _fetch_until(self, fetch_options, till=None, rbs_till=None): """ Hit endpoint, update storage and return when since==till. 
@@ -256,7 +256,7 @@ async def _fetch_until(self, fetch_options, till=None): if rbs_change_number is None: rbs_change_number = -1 - if till is not None and till < change_number and till < rbs_change_number: + if (till is not None and till < change_number) or (rbs_till is not None and till < rbs_change_number): # the passed till is less than change_number, no need to perform updates return change_number, rbs_change_number, segment_list @@ -282,7 +282,7 @@ async def _fetch_until(self, fetch_options, till=None): if feature_flag_changes.get('ff')['t'] == feature_flag_changes.get('ff')['s'] and feature_flag_changes.get('rbs')['t'] == feature_flag_changes.get('rbs')['s']: return feature_flag_changes.get('ff')['t'], feature_flag_changes.get('rbs')['t'], segment_list - async def _attempt_feature_flag_sync(self, fetch_options, till=None): + async def _attempt_feature_flag_sync(self, fetch_options, till=None, rbs_till=None): """ Hit endpoint, update storage and return True if sync is complete. @@ -300,9 +300,9 @@ async def _attempt_feature_flag_sync(self, fetch_options, till=None): remaining_attempts = _ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES while True: remaining_attempts -= 1 - change_number, rbs_change_number, segment_list = await self._fetch_until(fetch_options, till) + change_number, rbs_change_number, segment_list = await self._fetch_until(fetch_options, till, rbs_till) final_segment_list.update(segment_list) - if till is None or (till <= change_number and till <= rbs_change_number): + if (till is None or till <= change_number) and (rbs_till is None or rbs_till <= rbs_change_number): return True, remaining_attempts, change_number, rbs_change_number, final_segment_list elif remaining_attempts <= 0: @@ -311,7 +311,7 @@ async def _attempt_feature_flag_sync(self, fetch_options, till=None): how_long = self._backoff.get() await asyncio.sleep(how_long) - async def synchronize_splits(self, till=None): + async def synchronize_splits(self, till=None, rbs_till=None): """ Hit endpoint, 
update storage and return True if sync is complete. @@ -321,7 +321,7 @@ async def synchronize_splits(self, till=None): final_segment_list = set() fetch_options = FetchOptions(True, sets=self._get_config_sets()) # Set Cache-Control to no-cache successful_sync, remaining_attempts, change_number, rbs_change_number, segment_list = await self._attempt_feature_flag_sync(fetch_options, - till) + till, rbs_till) final_segment_list.update(segment_list) attempts = _ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES - remaining_attempts if successful_sync: # succedeed sync @@ -329,7 +329,7 @@ async def synchronize_splits(self, till=None): return final_segment_list with_cdn_bypass = FetchOptions(True, change_number, rbs_change_number, sets=self._get_config_sets()) # Set flag for bypassing CDN - without_cdn_successful_sync, remaining_attempts, change_number, rbs_change_number, segment_list = await self._attempt_feature_flag_sync(with_cdn_bypass, till) + without_cdn_successful_sync, remaining_attempts, change_number, rbs_change_number, segment_list = await self._attempt_feature_flag_sync(with_cdn_bypass, till, rbs_till) final_segment_list.update(segment_list) without_cdn_attempts = _ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES - remaining_attempts if without_cdn_successful_sync: diff --git a/tests/sync/test_splits_synchronizer.py b/tests/sync/test_splits_synchronizer.py index 470c2241..2c46f21f 100644 --- a/tests/sync/test_splits_synchronizer.py +++ b/tests/sync/test_splits_synchronizer.py @@ -322,7 +322,11 @@ def rbs_change_number_mock(): rbs_change_number_mock._calls += 1 if rbs_change_number_mock._calls == 1: return -1 - return 12345 # Return proper cn for CDN Bypass + elif change_number_mock._calls >= 2 and change_number_mock._calls <= 3: + return 555 + elif change_number_mock._calls <= 9: + return 555 + return 666 # Return proper cn for CDN Bypass change_number_mock._calls = 0 rbs_change_number_mock._calls = 0 @@ -330,26 +334,32 @@ def rbs_change_number_mock(): 
rbs_storage.get_change_number.side_effect = rbs_change_number_mock api = mocker.Mock() - + rbs_1 = copy.deepcopy(json_body['rbs']['d']) def get_changes(*args, **kwargs): get_changes.called += 1 +# pytest.set_trace() if get_changes.called == 1: return { 'ff': { 'd': self.splits, 's': -1, 't': 123 }, - 'rbs': {"t": 123, "s": -1, "d": []}} + 'rbs': {"t": 555, "s": -1, "d": rbs_1}} elif get_changes.called == 2: return { 'ff': { 'd': [], 's': 123, 't': 123 }, - 'rbs': {"t": 123, "s": 123, "d": []}} + 'rbs': {"t": 555, "s": 555, "d": []}} elif get_changes.called == 3: return { 'ff': { 'd': [], 's': 123, 't': 1234 }, - 'rbs': {"t": 123, "s": 123, "d": []}} + 'rbs': {"t": 555, "s": 555, "d": []}} elif get_changes.called >= 4 and get_changes.called <= 6: return { 'ff': { 'd': [], 's': 1234, 't': 1234 }, - 'rbs': {"t": 123, "s": 123, "d": []}} + 'rbs': {"t": 555, "s": 555, "d": []}} elif get_changes.called == 7: return { 'ff': { 'd': [], 's': 1234, 't': 12345 }, - 'rbs': {"t": 123, "s": 123, "d": []}} + 'rbs': {"t": 555, "s": 555, "d": []}} + elif get_changes.called == 8: + return { 'ff': { 'd': [], 's': 12345, 't': 12345 }, + 'rbs': {"t": 555, "s": 555, "d": []}} + rbs_1[0]['excluded']['keys'] = ['bilal@split.io'] return { 'ff': { 'd': [], 's': 12345, 't': 12345 }, - 'rbs': {"t": 123, "s": 123, "d": []}} + 'rbs': {"t": 666, "s": 666, "d": rbs_1}} + get_changes.called = 0 api.fetch_splits.side_effect = get_changes @@ -377,12 +387,17 @@ def intersect(sets): split_synchronizer.synchronize_splits(12345) assert api.fetch_splits.mock_calls[3][1][0] == 1234 assert api.fetch_splits.mock_calls[3][1][2].cache_control_headers == True - assert len(api.fetch_splits.mock_calls) == 10 # 2 ok + BACKOFF(2 since==till + 2 re-attempts) + CDN(2 since==till) + assert len(api.fetch_splits.mock_calls) == 8 # 2 ok + BACKOFF(2 since==till + 2 re-attempts) + CDN(2 since==till) inserted_split = storage.update.mock_calls[0][1][0][0] assert isinstance(inserted_split, Split) assert inserted_split.name 
== 'some_name' + split_synchronizer._backoff = Backoff(1, 0.1) + split_synchronizer.synchronize_splits(None, 666) + inserted_rbs = rbs_storage.update.mock_calls[8][1][0][0] + assert inserted_rbs.excluded.get_excluded_keys() == ['bilal@split.io'] + def test_sync_flag_sets_with_config_sets(self, mocker): """Test split sync with flag sets.""" storage = InMemorySplitStorage(['set1', 'set2']) @@ -723,7 +738,7 @@ def intersect(sets): split_synchronizer._backoff = Backoff(1, 0.1) await split_synchronizer.synchronize_splits(12345) assert (12345, True, 1234) == (self.change_number_3, self.fetch_options_3.cache_control_headers, self.fetch_options_3.change_number) - assert get_changes.called == 10 # 2 ok + BACKOFF(2 since==till + 2 re-attempts) + CDN(2 since==till) + assert get_changes.called == 8 # 2 ok + BACKOFF(2 since==till + 2 re-attempts) + CDN(2 since==till) inserted_split = self.parsed_split[0] assert isinstance(inserted_split, Split) From 7df86efd82013854f86ed873a6e01ed73294187b Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Tue, 11 Mar 2025 11:27:15 -0300 Subject: [PATCH 11/56] polishing --- splitio/sync/split.py | 29 ++++++++++++++++- tests/sync/test_splits_synchronizer.py | 44 +++++++++++++++++++------- 2 files changed, 60 insertions(+), 13 deletions(-) diff --git a/splitio/sync/split.py b/splitio/sync/split.py index 85f48417..d0e4690c 100644 --- a/splitio/sync/split.py +++ b/splitio/sync/split.py @@ -42,6 +42,9 @@ def __init__(self, feature_flag_api, feature_flag_storage, rule_based_segment_st :param feature_flag_storage: Feature Flag Storage. :type feature_flag_storage: splitio.storage.InMemorySplitStorage + + :param rule_based_segment_storage: Rule based segment Storage. 
+ :type rule_based_segment_storage: splitio.storage.InMemoryRuleBasedStorage """ self._api = feature_flag_api self._feature_flag_storage = feature_flag_storage @@ -83,6 +86,9 @@ def __init__(self, feature_flag_api, feature_flag_storage, rule_based_segment_st :param feature_flag_storage: Feature Flag Storage. :type feature_flag_storage: splitio.storage.InMemorySplitStorage + + :param rule_based_segment_storage: Rule based segment Storage. + :type rule_based_segment_storage: splitio.storage.InMemoryRuleBasedStorage """ SplitSynchronizerBase.__init__(self, feature_flag_api, feature_flag_storage, rule_based_segment_storage) @@ -96,6 +102,9 @@ def _fetch_until(self, fetch_options, till=None, rbs_till=None): :param till: Passed till from Streaming. :type till: int + :param rbs_till: Passed rbs till from Streaming. + :type rbs_till: int + :return: last change number :rtype: int """ @@ -145,6 +154,9 @@ def _attempt_feature_flag_sync(self, fetch_options, till=None, rbs_till=None): :param till: Passed till from Streaming. :type till: int + :param rbs_till: Passed rbs till from Streaming. + :type rbs_till: int + :return: Flags to check if it should perform bypass or operation ended :rtype: bool, int, int """ @@ -182,6 +194,9 @@ def synchronize_splits(self, till=None, rbs_till=None): :param till: Passed till from Streaming. :type till: int + + :param rbs_till: Passed rbs till from Streaming. + :type rbs_till: int """ final_segment_list = set() fetch_options = FetchOptions(True, sets=self._get_config_sets()) # Set Cache-Control to no-cache @@ -230,6 +245,9 @@ def __init__(self, feature_flag_api, feature_flag_storage, rule_based_segment_st :param feature_flag_storage: Feature Flag Storage. :type feature_flag_storage: splitio.storage.InMemorySplitStorage + + :param rule_based_segment_storage: Rule based segment Storage. 
+ :type rule_based_segment_storage: splitio.storage.InMemoryRuleBasedStorage """ SplitSynchronizerBase.__init__(self, feature_flag_api, feature_flag_storage, rule_based_segment_storage) @@ -243,6 +261,9 @@ async def _fetch_until(self, fetch_options, till=None, rbs_till=None): :param till: Passed till from Streaming. :type till: int + :param rbs_till: Passed rbs till from Streaming. + :type rbs_till: int + :return: last change number :rtype: int """ @@ -256,7 +277,7 @@ async def _fetch_until(self, fetch_options, till=None, rbs_till=None): if rbs_change_number is None: rbs_change_number = -1 - if (till is not None and till < change_number) or (rbs_till is not None and till < rbs_change_number): + if (till is not None and till < change_number) or (rbs_till is not None and rbs_till < rbs_change_number): # the passed till is less than change_number, no need to perform updates return change_number, rbs_change_number, segment_list @@ -292,6 +313,9 @@ async def _attempt_feature_flag_sync(self, fetch_options, till=None, rbs_till=No :param till: Passed till from Streaming. :type till: int + :param rbs_till: Passed rbs till from Streaming. + :type rbs_till: int + :return: Flags to check if it should perform bypass or operation ended :rtype: bool, int, int """ @@ -317,6 +341,9 @@ async def synchronize_splits(self, till=None, rbs_till=None): :param till: Passed till from Streaming. :type till: int + + :param rbs_till: Passed rbs till from Streaming. 
+ :type rbs_till: int """ final_segment_list = set() fetch_options = FetchOptions(True, sets=self._get_config_sets()) # Set Cache-Control to no-cache diff --git a/tests/sync/test_splits_synchronizer.py b/tests/sync/test_splits_synchronizer.py index 2c46f21f..2acf293f 100644 --- a/tests/sync/test_splits_synchronizer.py +++ b/tests/sync/test_splits_synchronizer.py @@ -337,7 +337,6 @@ def rbs_change_number_mock(): rbs_1 = copy.deepcopy(json_body['rbs']['d']) def get_changes(*args, **kwargs): get_changes.called += 1 -# pytest.set_trace() if get_changes.called == 1: return { 'ff': { 'd': self.splits, 's': -1, 't': 123 }, 'rbs': {"t": 555, "s": -1, "d": rbs_1}} @@ -392,6 +391,8 @@ def intersect(sets): inserted_split = storage.update.mock_calls[0][1][0][0] assert isinstance(inserted_split, Split) assert inserted_split.name == 'some_name' + inserted_rbs = rbs_storage.update.mock_calls[0][1][0][0] + assert inserted_rbs.excluded.get_excluded_keys() == ["mauro@split.io","gaston@split.io"] split_synchronizer._backoff = Backoff(1, 0.1) split_synchronizer.synchronize_splits(None, 666) @@ -664,7 +665,11 @@ async def rbs_change_number_mock(): rbs_change_number_mock._calls += 1 if rbs_change_number_mock._calls == 1: return -1 - return 12345 # Return proper cn for CDN Bypass + elif change_number_mock._calls >= 2 and change_number_mock._calls <= 3: + return 555 + elif change_number_mock._calls <= 9: + return 555 + return 666 # Return proper cn for CDN Bypass change_number_mock._calls = 0 rbs_change_number_mock._calls = 0 @@ -677,8 +682,10 @@ async def update(parsed_split, deleted, change_number): self.parsed_split = parsed_split storage.update = update + self.parsed_rbs = None async def rbs_update(parsed, deleted, change_number): - pass + if len(parsed) > 0: + self.parsed_rbs = parsed rbs_storage.update = rbs_update api = mocker.Mock() @@ -688,32 +695,38 @@ async def rbs_update(parsed, deleted, change_number): self.fetch_options_2 = None self.change_number_3 = None 
self.fetch_options_3 = None + rbs_1 = copy.deepcopy(json_body['rbs']['d']) + async def get_changes(change_number, rbs_change_number, fetch_options): get_changes.called += 1 if get_changes.called == 1: self.change_number_1 = change_number self.fetch_options_1 = fetch_options return { 'ff': { 'd': self.splits, 's': -1, 't': 123 }, - 'rbs': {"t": 123, "s": -1, "d": []}} + 'rbs': {"t": 555, "s": -1, "d": rbs_1}} elif get_changes.called == 2: self.change_number_2 = change_number self.fetch_options_2 = fetch_options return { 'ff': { 'd': [], 's': 123, 't': 123 }, - 'rbs': {"t": 123, "s": 123, "d": []}} + 'rbs': {"t": 555, "s": 555, "d": []}} elif get_changes.called == 3: return { 'ff': { 'd': [], 's': 123, 't': 1234 }, - 'rbs': {"t": 123, "s": 123, "d": []}} + 'rbs': {"t": 555, "s": 555, "d": []}} elif get_changes.called >= 4 and get_changes.called <= 6: return { 'ff': { 'd': [], 's': 1234, 't': 1234 }, - 'rbs': {"t": 123, "s": 123, "d": []}} + 'rbs': {"t": 555, "s": 555, "d": []}} elif get_changes.called == 7: return { 'ff': { 'd': [], 's': 1234, 't': 12345 }, - 'rbs': {"t": 123, "s": 123, "d": []}} - self.change_number_3 = change_number - self.fetch_options_3 = fetch_options + 'rbs': {"t": 555, "s": 555, "d": []}} + elif get_changes.called == 8: + self.change_number_3 = change_number + self.fetch_options_3 = fetch_options + return { 'ff': { 'd': [], 's': 12345, 't': 12345 }, + 'rbs': {"t": 555, "s": 555, "d": []}} + rbs_1[0]['excluded']['keys'] = ['bilal@split.io'] return { 'ff': { 'd': [], 's': 12345, 't': 12345 }, - 'rbs': {"t": 123, "s": 123, "d": []}} - + 'rbs': {"t": 666, "s": 666, "d": rbs_1}} + get_changes.called = 0 api.fetch_splits = get_changes @@ -743,7 +756,14 @@ def intersect(sets): inserted_split = self.parsed_split[0] assert isinstance(inserted_split, Split) assert inserted_split.name == 'some_name' + inserted_rbs = self.parsed_rbs[0] + assert inserted_rbs.excluded.get_excluded_keys() == ["mauro@split.io","gaston@split.io"] + split_synchronizer._backoff 
= Backoff(1, 0.1) + await split_synchronizer.synchronize_splits(None, 666) + inserted_rbs = self.parsed_rbs[0] + assert inserted_rbs.excluded.get_excluded_keys() == ['bilal@split.io'] + @pytest.mark.asyncio async def test_sync_flag_sets_with_config_sets(self, mocker): """Test split sync with flag sets.""" From 3396b5fa85a88f7a919b42a9eafba0b8410e6e6c Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Wed, 12 Mar 2025 12:37:14 -0300 Subject: [PATCH 12/56] Updated SSE classes --- splitio/models/telemetry.py | 1 + splitio/push/parser.py | 60 ++++++++++- splitio/push/processor.py | 10 +- splitio/push/workers.py | 121 +++++++++++++++------ splitio/spec.py | 2 +- tests/push/test_split_worker.py | 179 ++++++++++++++++++++++++-------- 6 files changed, 292 insertions(+), 81 deletions(-) diff --git a/splitio/models/telemetry.py b/splitio/models/telemetry.py index f734cf67..c9715da4 100644 --- a/splitio/models/telemetry.py +++ b/splitio/models/telemetry.py @@ -140,6 +140,7 @@ class OperationMode(Enum): class UpdateFromSSE(Enum): """Update from sse constants""" SPLIT_UPDATE = 'sp' + RBS_UPDATE = 'rbs' def get_latency_bucket_index(micros): """ diff --git a/splitio/push/parser.py b/splitio/push/parser.py index 098221e1..79b410e3 100644 --- a/splitio/push/parser.py +++ b/splitio/push/parser.py @@ -28,6 +28,7 @@ class UpdateType(Enum): SPLIT_UPDATE = 'SPLIT_UPDATE' SPLIT_KILL = 'SPLIT_KILL' SEGMENT_UPDATE = 'SEGMENT_UPDATE' + RB_SEGMENT_UPDATE = 'RB_SEGMENT_UPDATE' class ControlType(Enum): @@ -329,7 +330,7 @@ def __init__(self, channel, timestamp, change_number, previous_change_number, fe """Class constructor.""" BaseUpdate.__init__(self, channel, timestamp, change_number) self._previous_change_number = previous_change_number - self._feature_flag_definition = feature_flag_definition + self._object_definition = feature_flag_definition self._compression = compression @property @@ -352,13 +353,13 @@ def previous_change_number(self): # pylint:disable=no-self-use return 
self._previous_change_number @property - def feature_flag_definition(self): # pylint:disable=no-self-use + def object_definition(self): # pylint:disable=no-self-use """ Return feature flag definition :returns: The new feature flag definition :rtype: str """ - return self._feature_flag_definition + return self._object_definition @property def compression(self): # pylint:disable=no-self-use @@ -451,6 +452,56 @@ def __str__(self): """Return string representation.""" return "SegmentChange - changeNumber=%d, name=%s" % (self.change_number, self.segment_name) +class RBSChangeUpdate(BaseUpdate): + """rbs Change notification.""" + + def __init__(self, channel, timestamp, change_number, previous_change_number, rbs_definition, compression): + """Class constructor.""" + BaseUpdate.__init__(self, channel, timestamp, change_number) + self._previous_change_number = previous_change_number + self._object_definition = rbs_definition + self._compression = compression + + @property + def update_type(self): # pylint:disable=no-self-use + """ + Return the message type. + + :returns: The type of this parsed Update. 
+ :rtype: UpdateType + """ + return UpdateType.RB_SEGMENT_UPDATE + + @property + def previous_change_number(self): # pylint:disable=no-self-use + """ + Return previous change number + :returns: The previous change number + :rtype: int + """ + return self._previous_change_number + + @property + def object_definition(self): # pylint:disable=no-self-use + """ + Return rbs definition + :returns: The new rbs definition + :rtype: str + """ + return self._object_definition + + @property + def compression(self): # pylint:disable=no-self-use + """ + Return previous compression type + :returns: The compression type + :rtype: int + """ + return self._compression + + def __str__(self): + """Return string representation.""" + return "RBSChange - changeNumber=%d" % (self.change_number) class ControlMessage(BaseMessage): """Control notification.""" @@ -503,6 +554,9 @@ def _parse_update(channel, timestamp, data): if update_type == UpdateType.SPLIT_UPDATE and change_number is not None: return SplitChangeUpdate(channel, timestamp, change_number, data.get('pcn'), data.get('d'), data.get('c')) + if update_type == UpdateType.RB_SEGMENT_UPDATE and change_number is not None: + return RBSChangeUpdate(channel, timestamp, change_number, data.get('pcn'), data.get('d'), data.get('c')) + elif update_type == UpdateType.SPLIT_KILL and change_number is not None: return SplitKillUpdate(channel, timestamp, change_number, data['splitName'], data['defaultTreatment']) diff --git a/splitio/push/processor.py b/splitio/push/processor.py index e8de95c8..41d796c7 100644 --- a/splitio/push/processor.py +++ b/splitio/push/processor.py @@ -35,12 +35,13 @@ def __init__(self, synchronizer, telemetry_runtime_producer): self._feature_flag_queue = Queue() self._segments_queue = Queue() self._synchronizer = synchronizer - self._feature_flag_worker = SplitWorker(synchronizer.synchronize_splits, synchronizer.synchronize_segment, self._feature_flag_queue, synchronizer.split_sync.feature_flag_storage, 
synchronizer.segment_storage, telemetry_runtime_producer) + self._feature_flag_worker = SplitWorker(synchronizer.synchronize_splits, synchronizer.synchronize_segment, self._feature_flag_queue, synchronizer.split_sync.feature_flag_storage, synchronizer.segment_storage, telemetry_runtime_producer, synchronizer.split_sync.rule_based_segment_storage) self._segments_worker = SegmentWorker(synchronizer.synchronize_segment, self._segments_queue) self._handlers = { UpdateType.SPLIT_UPDATE: self._handle_feature_flag_update, UpdateType.SPLIT_KILL: self._handle_feature_flag_kill, - UpdateType.SEGMENT_UPDATE: self._handle_segment_change + UpdateType.SEGMENT_UPDATE: self._handle_segment_change, + UpdateType.RB_SEGMENT_UPDATE: self._handle_feature_flag_update } def _handle_feature_flag_update(self, event): @@ -119,12 +120,13 @@ def __init__(self, synchronizer, telemetry_runtime_producer): self._feature_flag_queue = asyncio.Queue() self._segments_queue = asyncio.Queue() self._synchronizer = synchronizer - self._feature_flag_worker = SplitWorkerAsync(synchronizer.synchronize_splits, synchronizer.synchronize_segment, self._feature_flag_queue, synchronizer.split_sync.feature_flag_storage, synchronizer.segment_storage, telemetry_runtime_producer) + self._feature_flag_worker = SplitWorkerAsync(synchronizer.synchronize_splits, synchronizer.synchronize_segment, self._feature_flag_queue, synchronizer.split_sync.feature_flag_storage, synchronizer.segment_storage, telemetry_runtime_producer, synchronizer.split_sync.rule_based_segment_storage) self._segments_worker = SegmentWorkerAsync(synchronizer.synchronize_segment, self._segments_queue) self._handlers = { UpdateType.SPLIT_UPDATE: self._handle_feature_flag_update, UpdateType.SPLIT_KILL: self._handle_feature_flag_kill, - UpdateType.SEGMENT_UPDATE: self._handle_segment_change + UpdateType.SEGMENT_UPDATE: self._handle_segment_change, + UpdateType.RB_SEGMENT_UPDATE: self._handle_feature_flag_update } async def 
_handle_feature_flag_update(self, event): diff --git a/splitio/push/workers.py b/splitio/push/workers.py index 5161d15d..e4888f36 100644 --- a/splitio/push/workers.py +++ b/splitio/push/workers.py @@ -9,11 +9,13 @@ from enum import Enum from splitio.models.splits import from_raw +from splitio.models.rule_based_segments import from_raw as rbs_from_raw from splitio.models.telemetry import UpdateFromSSE from splitio.push import SplitStorageException from splitio.push.parser import UpdateType from splitio.optional.loaders import asyncio -from splitio.util.storage_helper import update_feature_flag_storage, update_feature_flag_storage_async +from splitio.util.storage_helper import update_feature_flag_storage, update_feature_flag_storage_async, \ + update_rule_based_segment_storage, update_rule_based_segment_storage_async _LOGGER = logging.getLogger(__name__) @@ -25,9 +27,9 @@ class CompressionMode(Enum): ZLIB_COMPRESSION = 2 _compression_handlers = { - CompressionMode.NO_COMPRESSION: lambda event: base64.b64decode(event.feature_flag_definition), - CompressionMode.GZIP_COMPRESSION: lambda event: gzip.decompress(base64.b64decode(event.feature_flag_definition)).decode('utf-8'), - CompressionMode.ZLIB_COMPRESSION: lambda event: zlib.decompress(base64.b64decode(event.feature_flag_definition)).decode('utf-8'), + CompressionMode.NO_COMPRESSION: lambda event: base64.b64decode(event.object_definition), + CompressionMode.GZIP_COMPRESSION: lambda event: gzip.decompress(base64.b64decode(event.object_definition)).decode('utf-8'), + CompressionMode.ZLIB_COMPRESSION: lambda event: zlib.decompress(base64.b64decode(event.object_definition)).decode('utf-8'), } class WorkerBase(object, metaclass=abc.ABCMeta): @@ -45,10 +47,19 @@ def start(self): def stop(self): """Stop worker.""" - def _get_feature_flag_definition(self, event): - """return feature flag definition in event.""" + def _get_object_definition(self, event): + """return feature flag or rule based segment definition in event.""" 
cm = CompressionMode(event.compression) # will throw if the number is not defined in compression mode return _compression_handlers[cm](event) + + def _get_referenced_rbs(self, feature_flag): + referenced_rbs = set() + for condition in feature_flag.conditions: + for matcher in condition.matchers: + raw_matcher = matcher.to_json() + if raw_matcher['matcherType'] == 'IN_RULE_BASED_SEGMENT': + referenced_rbs.add(raw_matcher['userDefinedSegmentMatcherData']['segmentName']) + return referenced_rbs class SegmentWorker(WorkerBase): """Segment Worker for processing updates.""" @@ -173,7 +184,7 @@ class SplitWorker(WorkerBase): _centinel = object() - def __init__(self, synchronize_feature_flag, synchronize_segment, feature_flag_queue, feature_flag_storage, segment_storage, telemetry_runtime_producer): + def __init__(self, synchronize_feature_flag, synchronize_segment, feature_flag_queue, feature_flag_storage, segment_storage, telemetry_runtime_producer, rule_based_segment_storage): """ Class constructor. @@ -189,6 +200,8 @@ def __init__(self, synchronize_feature_flag, synchronize_segment, feature_flag_q :type segment_storage: splitio.storage.inmemory.InMemorySegmentStorage :param telemetry_runtime_producer: Telemetry runtime producer instance :type telemetry_runtime_producer: splitio.engine.telemetry.TelemetryRuntimeProducer + :param rule_based_segment_storage: Rule based segment Storage. 
+ :type rule_based_segment_storage: splitio.storage.InMemoryRuleBasedStorage """ self._feature_flag_queue = feature_flag_queue self._handler = synchronize_feature_flag @@ -198,6 +211,7 @@ def __init__(self, synchronize_feature_flag, synchronize_segment, feature_flag_q self._feature_flag_storage = feature_flag_storage self._segment_storage = segment_storage self._telemetry_runtime_producer = telemetry_runtime_producer + self._rule_based_segment_storage = rule_based_segment_storage def is_running(self): """Return whether the working is running.""" @@ -206,18 +220,30 @@ def is_running(self): def _apply_iff_if_needed(self, event): if not self._check_instant_ff_update(event): return False - try: - new_feature_flag = from_raw(json.loads(self._get_feature_flag_definition(event))) - segment_list = update_feature_flag_storage(self._feature_flag_storage, [new_feature_flag], event.change_number) - for segment_name in segment_list: - if self._segment_storage.get(segment_name) is None: - _LOGGER.debug('Fetching new segment %s', segment_name) - self._segment_handler(segment_name, event.change_number) - - self._telemetry_runtime_producer.record_update_from_sse(UpdateFromSSE.SPLIT_UPDATE) + if event.update_type == UpdateType.SPLIT_UPDATE: + new_feature_flag = from_raw(json.loads(self._get_object_definition(event))) + segment_list = update_feature_flag_storage(self._feature_flag_storage, [new_feature_flag], event.change_number) + for segment_name in segment_list: + if self._segment_storage.get(segment_name) is None: + _LOGGER.debug('Fetching new segment %s', segment_name) + self._segment_handler(segment_name, event.change_number) + + referenced_rbs = self._get_referenced_rbs(new_feature_flag) + if len(referenced_rbs) > 0 and not self._rule_based_segment_storage.contains(referenced_rbs): + _LOGGER.debug('Fetching new rule based segment(s) %s', referenced_rbs) + self._handler(None, event.change_number) + 
self._telemetry_runtime_producer.record_update_from_sse(UpdateFromSSE.SPLIT_UPDATE) + else: + new_rbs = rbs_from_raw(json.loads(self._get_object_definition(event))) + segment_list = update_rule_based_segment_storage(self._rule_based_segment_storage, [new_rbs], event.change_number) + for segment_name in segment_list: + if self._segment_storage.get(segment_name) is None: + _LOGGER.debug('Fetching new segment %s', segment_name) + self._segment_handler(segment_name, event.change_number) + self._telemetry_runtime_producer.record_update_from_sse(UpdateFromSSE.RBS_UPDATE) return True - + except Exception as e: raise SplitStorageException(e) @@ -225,6 +251,9 @@ def _check_instant_ff_update(self, event): if event.update_type == UpdateType.SPLIT_UPDATE and event.compression is not None and event.previous_change_number == self._feature_flag_storage.get_change_number(): return True + if event.update_type == UpdateType.RB_SEGMENT_UPDATE and event.compression is not None and event.previous_change_number == self._rule_based_segment_storage.get_change_number(): + return True + return False def _run(self): @@ -239,8 +268,13 @@ def _run(self): try: if self._apply_iff_if_needed(event): continue - - sync_result = self._handler(event.change_number) + till = None + rbs_till = None + if event.update_type == UpdateType.SPLIT_UPDATE: + till = event.change_number + else: + rbs_till = event.change_number + sync_result = self._handler(till, rbs_till) if not sync_result.success and sync_result.error_code is not None and sync_result.error_code == 414: _LOGGER.error("URI too long exception caught, sync failed") @@ -279,7 +313,7 @@ class SplitWorkerAsync(WorkerBase): _centinel = object() - def __init__(self, synchronize_feature_flag, synchronize_segment, feature_flag_queue, feature_flag_storage, segment_storage, telemetry_runtime_producer): + def __init__(self, synchronize_feature_flag, synchronize_segment, feature_flag_queue, feature_flag_storage, segment_storage, telemetry_runtime_producer, 
rule_based_segment_storage): """ Class constructor. @@ -295,6 +329,8 @@ def __init__(self, synchronize_feature_flag, synchronize_segment, feature_flag_q :type segment_storage: splitio.storage.inmemory.InMemorySegmentStorage :param telemetry_runtime_producer: Telemetry runtime producer instance :type telemetry_runtime_producer: splitio.engine.telemetry.TelemetryRuntimeProducer + :param rule_based_segment_storage: Rule based segment Storage. + :type rule_based_segment_storage: splitio.storage.InMemoryRuleBasedStorage """ self._feature_flag_queue = feature_flag_queue self._handler = synchronize_feature_flag @@ -303,7 +339,8 @@ def __init__(self, synchronize_feature_flag, synchronize_segment, feature_flag_q self._feature_flag_storage = feature_flag_storage self._segment_storage = segment_storage self._telemetry_runtime_producer = telemetry_runtime_producer - + self._rule_based_segment_storage = rule_based_segment_storage + def is_running(self): """Return whether the working is running.""" return self._running @@ -312,23 +349,39 @@ async def _apply_iff_if_needed(self, event): if not await self._check_instant_ff_update(event): return False try: - new_feature_flag = from_raw(json.loads(self._get_feature_flag_definition(event))) - segment_list = await update_feature_flag_storage_async(self._feature_flag_storage, [new_feature_flag], event.change_number) - for segment_name in segment_list: - if await self._segment_storage.get(segment_name) is None: - _LOGGER.debug('Fetching new segment %s', segment_name) - await self._segment_handler(segment_name, event.change_number) - - await self._telemetry_runtime_producer.record_update_from_sse(UpdateFromSSE.SPLIT_UPDATE) + if event.update_type == UpdateType.SPLIT_UPDATE: + new_feature_flag = from_raw(json.loads(self._get_object_definition(event))) + segment_list = await update_feature_flag_storage_async(self._feature_flag_storage, [new_feature_flag], event.change_number) + for segment_name in segment_list: + if await 
self._segment_storage.get(segment_name) is None: + _LOGGER.debug('Fetching new segment %s', segment_name) + await self._segment_handler(segment_name, event.change_number) + + referenced_rbs = self._get_referenced_rbs(new_feature_flag) + if len(referenced_rbs) > 0 and not await self._rule_based_segment_storage.contains(referenced_rbs): + await self._handler(None, event.change_number) + + await self._telemetry_runtime_producer.record_update_from_sse(UpdateFromSSE.SPLIT_UPDATE) + else: + new_rbs = rbs_from_raw(json.loads(self._get_object_definition(event))) + segment_list = await update_rule_based_segment_storage_async(self._rule_based_segment_storage, [new_rbs], event.change_number) + for segment_name in segment_list: + if await self._segment_storage.get(segment_name) is None: + _LOGGER.debug('Fetching new segment %s', segment_name) + await self._segment_handler(segment_name, event.change_number) + await self._telemetry_runtime_producer.record_update_from_sse(UpdateFromSSE.RBS_UPDATE) return True except Exception as e: raise SplitStorageException(e) - async def _check_instant_ff_update(self, event): if event.update_type == UpdateType.SPLIT_UPDATE and event.compression is not None and event.previous_change_number == await self._feature_flag_storage.get_change_number(): return True + + if event.update_type == UpdateType.RB_SEGMENT_UPDATE and event.compression is not None and event.previous_change_number == await self._rule_based_segment_storage.get_change_number(): + return True + return False async def _run(self): @@ -343,7 +396,13 @@ async def _run(self): try: if await self._apply_iff_if_needed(event): continue - await self._handler(event.change_number) + till = None + rbs_till = None + if event.update_type == UpdateType.SPLIT_UPDATE: + till = event.change_number + else: + rbs_till = event.change_number + await self._handler(till, rbs_till) except SplitStorageException as e: # pylint: disable=broad-except _LOGGER.error('Exception Updating Feature Flag') 
_LOGGER.debug('Exception information: ', exc_info=True) diff --git a/splitio/spec.py b/splitio/spec.py index 1388fcda..cd7588e0 100644 --- a/splitio/spec.py +++ b/splitio/spec.py @@ -1 +1 @@ -SPEC_VERSION = '1.1' +SPEC_VERSION = '1.3' diff --git a/tests/push/test_split_worker.py b/tests/push/test_split_worker.py index d792cada..0d3ac824 100644 --- a/tests/push/test_split_worker.py +++ b/tests/push/test_split_worker.py @@ -1,79 +1,127 @@ """Split Worker tests.""" import time import queue +import base64 import pytest from splitio.api import APIException from splitio.push.workers import SplitWorker, SplitWorkerAsync from splitio.models.notification import SplitChangeNotification from splitio.optional.loaders import asyncio -from splitio.push.parser import SplitChangeUpdate +from splitio.push.parser import SplitChangeUpdate, RBSChangeUpdate from splitio.engine.telemetry import TelemetryStorageProducer, TelemetryStorageProducerAsync from splitio.storage.inmemmory import InMemoryTelemetryStorage, InMemorySplitStorage, InMemorySegmentStorage, \ InMemoryTelemetryStorageAsync, InMemorySplitStorageAsync, InMemorySegmentStorageAsync change_number_received = None - - -def handler_sync(change_number): +rbs = { + "changeNumber": 5, + "name": "sample_rule_based_segment", + "status": "ACTIVE", + "trafficTypeName": "user", + "excluded":{ + "keys":["mauro@split.io","gaston@split.io"], + "segments":[] + }, + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": "email" + }, + "matcherType": "ENDS_WITH", + "negate": False, + "whitelistMatcherData": { + "whitelist": [ + "@split.io" + ] + } + } + ] + } + } + ] + } + +def handler_sync(change_number, rbs_change_number): global change_number_received + global rbs_change_number_received + change_number_received = change_number + rbs_change_number_received = rbs_change_number return -async def handler_async(change_number): +async def 
handler_async(change_number, rbs_change_number): global change_number_received + global rbs_change_number_received change_number_received = change_number + rbs_change_number_received = rbs_change_number return class SplitWorkerTests(object): - def test_on_error(self, mocker): - q = queue.Queue() - def handler_sync(change_number): - raise APIException('some') - - split_worker = SplitWorker(handler_sync, mocker.Mock(), q, mocker.Mock(), mocker.Mock(), mocker.Mock()) - split_worker.start() - assert split_worker.is_running() - - q.put(SplitChangeUpdate('some', 'SPLIT_UPDATE', 123456789, None, None, None)) - with pytest.raises(Exception): - split_worker._handler() - - assert split_worker.is_running() - assert split_worker._worker.is_alive() - split_worker.stop() - time.sleep(1) - assert not split_worker.is_running() - assert not split_worker._worker.is_alive() - def test_handler(self, mocker): q = queue.Queue() - split_worker = SplitWorker(handler_sync, mocker.Mock(), q, mocker.Mock(), mocker.Mock(), mocker.Mock()) + split_worker = SplitWorker(handler_sync, mocker.Mock(), q, mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock()) global change_number_received + global rbs_change_number_received assert not split_worker.is_running() split_worker.start() assert split_worker.is_running() - - # should call the handler - q.put(SplitChangeUpdate('some', 'SPLIT_UPDATE', 123456789, None, None, None)) - time.sleep(0.1) - assert change_number_received == 123456789 - + def get_change_number(): return 2345 split_worker._feature_flag_storage.get_change_number = get_change_number + def get_rbs_change_number(): + return 2345 + split_worker._rule_based_segment_storage.get_change_number = get_rbs_change_number + self._feature_flag_added = None self._feature_flag_deleted = None def update(feature_flag_add, feature_flag_delete, change_number): self._feature_flag_added = feature_flag_add - self._feature_flag_deleted = feature_flag_delete + self._feature_flag_deleted = 
feature_flag_delete split_worker._feature_flag_storage.update = update split_worker._feature_flag_storage.config_flag_sets_used = 0 + self._rbs_added = None + self._rbs_deleted = None + def update(rbs_add, rbs_delete, change_number): + self._rbs_added = rbs_add + self._rbs_deleted = rbs_delete + split_worker._rule_based_segment_storage.update = update + + # should not call the handler + rbs_change_number_received = 0 + rbs1 = str(rbs) + rbs1 = rbs1.replace("'", "\"") + rbs1 = rbs1.replace("False", "false") + encoded = base64.b64encode(bytes(rbs1, "utf-8")) + q.put(RBSChangeUpdate('some', 'RB_SEGMENT_UPDATE', 123456790, 2345, encoded, 0)) + time.sleep(0.1) + assert rbs_change_number_received == 0 + assert self._rbs_added[0].name == "sample_rule_based_segment" + + # should call the handler + q.put(SplitChangeUpdate('some', 'SPLIT_UPDATE', 123456789, None, None, None)) + time.sleep(0.1) + assert change_number_received == 123456789 + assert rbs_change_number_received == None + + # should call the handler + q.put(RBSChangeUpdate('some', 'RB_SEGMENT_UPDATE', 123456789, None, None, None)) + time.sleep(0.1) + assert rbs_change_number_received == 123456789 + assert change_number_received == None + + # should call the handler q.put(SplitChangeUpdate('some', 'SPLIT_UPDATE', 123456790, 12345, "{}", 1)) time.sleep(0.1) @@ -94,12 +142,32 @@ def update(feature_flag_add, feature_flag_delete, change_number): split_worker.stop() assert not split_worker.is_running() + def test_on_error(self, mocker): + q = queue.Queue() + def handler_sync(change_number): + raise APIException('some') + + split_worker = SplitWorker(handler_sync, mocker.Mock(), q, mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock()) + split_worker.start() + assert split_worker.is_running() + + q.put(SplitChangeUpdate('some', 'SPLIT_UPDATE', 123456789, None, None, None)) + with pytest.raises(Exception): + split_worker._handler() + + assert split_worker.is_running() + assert split_worker._worker.is_alive() + 
split_worker.stop() + time.sleep(1) + assert not split_worker.is_running() + assert not split_worker._worker.is_alive() + def test_compression(self, mocker): q = queue.Queue() telemetry_storage = InMemoryTelemetryStorage() telemetry_producer = TelemetryStorageProducer(telemetry_storage) telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() - split_worker = SplitWorker(handler_sync, mocker.Mock(), q, mocker.Mock(), mocker.Mock(), telemetry_runtime_producer) + split_worker = SplitWorker(handler_sync, mocker.Mock(), q, mocker.Mock(), mocker.Mock(), telemetry_runtime_producer, mocker.Mock()) global change_number_received split_worker.start() def get_change_number(): @@ -148,7 +216,7 @@ def update(feature_flag_add, feature_flag_delete, change_number): def test_edge_cases(self, mocker): q = queue.Queue() - split_worker = SplitWorker(handler_sync, mocker.Mock(), q, mocker.Mock(), mocker.Mock(), mocker.Mock()) + split_worker = SplitWorker(handler_sync, mocker.Mock(), q, mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock()) global change_number_received split_worker.start() @@ -201,7 +269,7 @@ def test_fetch_segment(self, mocker): def segment_handler_sync(segment_name, change_number): self.segment_name = segment_name return - split_worker = SplitWorker(handler_sync, segment_handler_sync, q, split_storage, segment_storage, mocker.Mock()) + split_worker = SplitWorker(handler_sync, segment_handler_sync, q, split_storage, segment_storage, mocker.Mock(), mocker.Mock()) split_worker.start() def get_change_number(): @@ -225,7 +293,7 @@ async def test_on_error(self, mocker): def handler_sync(change_number): raise APIException('some') - split_worker = SplitWorkerAsync(handler_async, mocker.Mock(), q, mocker.Mock(), mocker.Mock(), mocker.Mock()) + split_worker = SplitWorkerAsync(handler_async, mocker.Mock(), q, mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock()) split_worker.start() assert split_worker.is_running() @@ -253,7 +321,7 @@ def 
_worker_running(self): @pytest.mark.asyncio async def test_handler(self, mocker): q = asyncio.Queue() - split_worker = SplitWorkerAsync(handler_async, mocker.Mock(), q, mocker.Mock(), mocker.Mock(), mocker.Mock()) + split_worker = SplitWorkerAsync(handler_async, mocker.Mock(), q, mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock()) assert not split_worker.is_running() split_worker.start() @@ -261,7 +329,8 @@ async def test_handler(self, mocker): assert(self._worker_running()) global change_number_received - + global rbs_change_number_received + # should call the handler await q.put(SplitChangeUpdate('some', 'SPLIT_UPDATE', 123456789, None, None, None)) await asyncio.sleep(0.1) @@ -271,6 +340,10 @@ async def get_change_number(): return 2345 split_worker._feature_flag_storage.get_change_number = get_change_number + async def get_rbs_change_number(): + return 2345 + split_worker._rule_based_segment_storage.get_change_number = get_rbs_change_number + self.new_change_number = 0 self._feature_flag_added = None self._feature_flag_deleted = None @@ -289,6 +362,24 @@ async def record_update_from_sse(xx): pass split_worker._telemetry_runtime_producer.record_update_from_sse = record_update_from_sse + self._rbs_added = None + self._rbs_deleted = None + async def update_rbs(rbs_add, rbs_delete, change_number): + self._rbs_added = rbs_add + self._rbs_deleted = rbs_delete + split_worker._rule_based_segment_storage.update = update_rbs + + # should not call the handler + rbs_change_number_received = 0 + rbs1 = str(rbs) + rbs1 = rbs1.replace("'", "\"") + rbs1 = rbs1.replace("False", "false") + encoded = base64.b64encode(bytes(rbs1, "utf-8")) + await q.put(RBSChangeUpdate('some', 'RB_SEGMENT_UPDATE', 123456790, 2345, encoded, 0)) + await asyncio.sleep(0.1) + assert rbs_change_number_received == 0 + assert self._rbs_added[0].name == "sample_rule_based_segment" + # should call the handler await q.put(SplitChangeUpdate('some', 'SPLIT_UPDATE', 123456790, 12345, "{}", 1)) await 
asyncio.sleep(0.1) @@ -318,7 +409,7 @@ async def test_compression(self, mocker): telemetry_storage = await InMemoryTelemetryStorageAsync.create() telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() - split_worker = SplitWorkerAsync(handler_async, mocker.Mock(), q, mocker.Mock(), mocker.Mock(), telemetry_runtime_producer) + split_worker = SplitWorkerAsync(handler_async, mocker.Mock(), q, mocker.Mock(), mocker.Mock(), telemetry_runtime_producer, mocker.Mock()) global change_number_received split_worker.start() async def get_change_number(): @@ -343,6 +434,10 @@ async def update(feature_flag_add, feature_flag_delete, change_number): split_worker._feature_flag_storage.update = update split_worker._feature_flag_storage.config_flag_sets_used = 0 + async def contains(rbs): + return False + split_worker._rule_based_segment_storage.contains = contains + # compression 0 await q.put(SplitChangeUpdate('some', 'SPLIT_UPDATE', 123456790, 2345, 
'eyJ0cmFmZmljVHlwZU5hbWUiOiJ1c2VyIiwiaWQiOiIzM2VhZmE1MC0xYTY1LTExZWQtOTBkZi1mYTMwZDk2OTA0NDUiLCJuYW1lIjoiYmlsYWxfc3BsaXQiLCJ0cmFmZmljQWxsb2NhdGlvbiI6MTAwLCJ0cmFmZmljQWxsb2NhdGlvblNlZWQiOi0xMzY0MTE5MjgyLCJzZWVkIjotNjA1OTM4ODQzLCJzdGF0dXMiOiJBQ1RJVkUiLCJraWxsZWQiOmZhbHNlLCJkZWZhdWx0VHJlYXRtZW50Ijoib2ZmIiwiY2hhbmdlTnVtYmVyIjoxNjg0MzQwOTA4NDc1LCJhbGdvIjoyLCJjb25maWd1cmF0aW9ucyI6e30sImNvbmRpdGlvbnMiOlt7ImNvbmRpdGlvblR5cGUiOiJST0xMT1VUIiwibWF0Y2hlckdyb3VwIjp7ImNvbWJpbmVyIjoiQU5EIiwibWF0Y2hlcnMiOlt7ImtleVNlbGVjdG9yIjp7InRyYWZmaWNUeXBlIjoidXNlciJ9LCJtYXRjaGVyVHlwZSI6IklOX1NFR01FTlQiLCJuZWdhdGUiOmZhbHNlLCJ1c2VyRGVmaW5lZFNlZ21lbnRNYXRjaGVyRGF0YSI6eyJzZWdtZW50TmFtZSI6ImJpbGFsX3NlZ21lbnQifX1dfSwicGFydGl0aW9ucyI6W3sidHJlYXRtZW50Ijoib24iLCJzaXplIjowfSx7InRyZWF0bWVudCI6Im9mZiIsInNpemUiOjEwMH1dLCJsYWJlbCI6ImluIHNlZ21lbnQgYmlsYWxfc2VnbWVudCJ9LHsiY29uZGl0aW9uVHlwZSI6IlJPTExPVVQiLCJtYXRjaGVyR3JvdXAiOnsiY29tYmluZXIiOiJBTkQiLCJtYXRjaGVycyI6W3sia2V5U2VsZWN0b3IiOnsidHJhZmZpY1R5cGUiOiJ1c2VyIn0sIm1hdGNoZXJUeXBlIjoiQUxMX0tFWVMiLCJuZWdhdGUiOmZhbHNlfV19LCJwYXJ0aXRpb25zIjpbeyJ0cmVhdG1lbnQiOiJvbiIsInNpemUiOjB9LHsidHJlYXRtZW50Ijoib2ZmIiwic2l6ZSI6MTAwfV0sImxhYmVsIjoiZGVmYXVsdCBydWxlIn1dfQ==', 0)) await asyncio.sleep(0.1) @@ -376,7 +471,7 @@ async def update(feature_flag_add, feature_flag_delete, change_number): @pytest.mark.asyncio async def test_edge_cases(self, mocker): q = asyncio.Queue() - split_worker = SplitWorkerAsync(handler_async, mocker.Mock(), q, mocker.Mock(), mocker.Mock(), mocker.Mock()) + split_worker = SplitWorkerAsync(handler_async, mocker.Mock(), q, mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock()) global change_number_received split_worker.start() @@ -434,7 +529,7 @@ async def test_fetch_segment(self, mocker): async def segment_handler_sync(segment_name, change_number): self.segment_name = segment_name return - split_worker = SplitWorkerAsync(handler_async, segment_handler_sync, q, split_storage, segment_storage, mocker.Mock()) + split_worker = 
SplitWorkerAsync(handler_async, segment_handler_sync, q, split_storage, segment_storage, mocker.Mock(), mocker.Mock()) split_worker.start() async def get_change_number(): From 7cd34ebff80915d5e58e7c5f0eacb194c8c1424b Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Wed, 12 Mar 2025 16:56:28 -0300 Subject: [PATCH 13/56] updated redis, pluggable and localjson storages --- splitio/storage/inmemmory.py | 3 +- splitio/storage/pluggable.py | 244 ++++++++++++++++++++++++++++++++++- splitio/storage/redis.py | 236 ++++++++++++++++++++++++++++++++- splitio/sync/split.py | 104 ++++++++++----- 4 files changed, 549 insertions(+), 38 deletions(-) diff --git a/splitio/storage/inmemmory.py b/splitio/storage/inmemmory.py index f7af8825..98fc0543 100644 --- a/splitio/storage/inmemmory.py +++ b/splitio/storage/inmemmory.py @@ -109,6 +109,7 @@ def remove_flag_set(self, flag_sets, feature_flag_name, should_filter): class InMemoryRuleBasedSegmentStorage(RuleBasedSegmentsStorage): """InMemory implementation of a feature flag storage base.""" + def __init__(self): """Constructor.""" self._lock = threading.RLock() @@ -192,7 +193,7 @@ def _set_change_number(self, new_change_number): def get_segment_names(self): """ - Retrieve a list of all excluded segments names. + Retrieve a list of all rule based segments names. :return: List of segment names. 
:rtype: list(str) diff --git a/splitio/storage/pluggable.py b/splitio/storage/pluggable.py index 7f0a5287..1cb7e054 100644 --- a/splitio/storage/pluggable.py +++ b/splitio/storage/pluggable.py @@ -5,15 +5,253 @@ import threading from splitio.optional.loaders import asyncio -from splitio.models import splits, segments +from splitio.models import splits, segments, rule_based_segments from splitio.models.impressions import Impression from splitio.models.telemetry import MethodExceptions, MethodLatencies, TelemetryConfig, MAX_TAGS,\ MethodLatenciesAsync, MethodExceptionsAsync, TelemetryConfigAsync -from splitio.storage import FlagSetsFilter, SplitStorage, SegmentStorage, ImpressionStorage, EventStorage, TelemetryStorage +from splitio.storage import FlagSetsFilter, SplitStorage, SegmentStorage, ImpressionStorage, EventStorage, TelemetryStorage, RuleBasedSegmentsStorage from splitio.util.storage_helper import get_valid_flag_sets, combine_valid_flag_sets _LOGGER = logging.getLogger(__name__) +class PluggableRuleBasedSegmentsStorageBase(RuleBasedSegmentsStorage): + """RedPluggable storage for rule based segments.""" + + _RB_SEGMENT_NAME_LENGTH = 23 + _TILL_LENGTH = 4 + + def __init__(self, pluggable_adapter, prefix=None): + """ + Class constructor. + + :param redis_client: Redis client or compliant interface. + :type redis_client: splitio.storage.adapters.redis.RedisAdapter + """ + self._pluggable_adapter = pluggable_adapter + self._prefix = "SPLITIO.rbsegment.${segmen_name}" + self._rb_segments_till_prefix = "SPLITIO.rbsegments.till" + if prefix is not None: + self._prefix = prefix + "." + self._prefix + self._rb_segments_till_prefix = prefix + "." + self._rb_segments_till_prefix + + def get(self, segment_name): + """ + Retrieve a rule based segment. + + :param segment_name: Name of the segment to fetch. + :type segment_name: str + + :rtype: str + """ + pass + + def get_change_number(self): + """ + Retrieve latest rule based segment change number. 
+ + :rtype: int + """ + pass + + def contains(self, segment_names): + """ + Return whether the segments exists in rule based segment in cache. + + :param segment_names: segment name to validate. + :type segment_names: str + + :return: True if segment names exists. False otherwise. + :rtype: bool + """ + pass + + def get_segment_names(self): + """ + Retrieve a list of all excluded segments names. + + :return: List of segment names. + :rtype: list(str) + """ + pass + + def update(self, to_add, to_delete, new_change_number): + """ + Update rule based segment.. + + :param to_add: List of rule based segment. to add + :type to_add: list[splitio.models.rule_based_segments.RuleBasedSegment] + :param to_delete: List of rule based segment. to delete + :type to_delete: list[splitio.models.rule_based_segments.RuleBasedSegment] + :param new_change_number: New change number. + :type new_change_number: int + """ + raise NotImplementedError('Only redis-consumer mode is supported.') + + def get_large_segment_names(self): + """ + Retrieve a list of all excluded large segments names. + + :return: List of segment names. + :rtype: list(str) + """ + pass + +class PluggableRuleBasedSegmentsStorage(PluggableRuleBasedSegmentsStorageBase): + """RedPluggable storage for rule based segments.""" + + def __init__(self, pluggable_adapter, prefix=None): + """ + Class constructor. + + :param redis_client: Redis client or compliant interface. + :type redis_client: splitio.storage.adapters.redis.RedisAdapter + """ + PluggableRuleBasedSegmentsStorageBase.__init__(self, pluggable_adapter, prefix) + + def get(self, segment_name): + """ + Retrieve a rule based segment. + + :param segment_name: Name of the segment to fetch. 
+ :type segment_name: str + + :rtype: str + """ + try: + rb_segment = self._pluggable_adapter.get(self._prefix.format(segment_name=segment_name)) + if not rb_segment: + return None + + return rule_based_segments.from_raw(rb_segment) + + except Exception: + _LOGGER.error('Error getting rule based segment from storage') + _LOGGER.debug('Error: ', exc_info=True) + return None + + def get_change_number(self): + """ + Retrieve latest rule based segment change number. + + :rtype: int + """ + try: + return self._pluggable_adapter.get(self._rb_segments_till_prefix) + + except Exception: + _LOGGER.error('Error getting change number in rule based segment storage') + _LOGGER.debug('Error: ', exc_info=True) + return None + + def contains(self, segment_names): + """ + Return whether the segments exists in rule based segment in cache. + + :param segment_names: segment name to validate. + :type segment_names: str + + :return: True if segment names exists. False otherwise. + :rtype: bool + """ + return set(segment_names).issubset(self.get_segment_names()) + + def get_segment_names(self): + """ + Retrieve a list of all rule based segments names. + + :return: List of segment names. + :rtype: list(str) + """ + try: + keys = [] + for key in self._pluggable_adapter.get_keys_by_prefix(self._prefix[:-self._RB_SEGMENT_NAME_LENGTH]): + if key[-self._TILL_LENGTH:] != 'till': + keys.append(key[len(self._prefix[:-self._RB_SEGMENT_NAME_LENGTH]):]) + return keys + + except Exception: + _LOGGER.error('Error getting rule based segments names from storage') + _LOGGER.debug('Error: ', exc_info=True) + return None + +class PluggableRuleBasedSegmentsStorageAsync(RuleBasedSegmentsStorage): + """RedPluggable storage for rule based segments.""" + + def __init__(self, pluggable_adapter, prefix=None): + """ + Class constructor. + + :param redis_client: Redis client or compliant interface. 
+ :type redis_client: splitio.storage.adapters.redis.RedisAdapter + """ + PluggableRuleBasedSegmentsStorageBase.__init__(self, pluggable_adapter, prefix) + + async def get(self, segment_name): + """ + Retrieve a rule based segment. + + :param segment_name: Name of the segment to fetch. + :type segment_name: str + + :rtype: str + """ + try: + rb_segment = await self._pluggable_adapter.get(self._prefix.format(segment_name=segment_name)) + if not rb_segment: + return None + + return rule_based_segments.from_raw(rb_segment) + + except Exception: + _LOGGER.error('Error getting rule based segment from storage') + _LOGGER.debug('Error: ', exc_info=True) + return None + + async def get_change_number(self): + """ + Retrieve latest rule based segment change number. + + :rtype: int + """ + try: + return await self._pluggable_adapter.get(self._rb_segments_till_prefix) + + except Exception: + _LOGGER.error('Error getting change number in rule based segment storage') + _LOGGER.debug('Error: ', exc_info=True) + return None + + async def contains(self, segment_names): + """ + Return whether the segments exists in rule based segment in cache. + + :param segment_names: segment name to validate. + :type segment_names: str + + :return: True if segment names exists. False otherwise. + :rtype: bool + """ + return await set(segment_names).issubset(self.get_segment_names()) + + async def get_segment_names(self): + """ + Retrieve a list of all rule based segments names. + + :return: List of segment names. 
+ :rtype: list(str) + """ + try: + keys = [] + for key in await self._pluggable_adapter.get_keys_by_prefix(self._prefix[:-self._RB_SEGMENT_NAME_LENGTH]): + if key[-self._TILL_LENGTH:] != 'till': + keys.append(key[len(self._prefix[:-self._RB_SEGMENT_NAME_LENGTH]):]) + return keys + + except Exception: + _LOGGER.error('Error getting rule based segments names from storage') + _LOGGER.debug('Error: ', exc_info=True) + return None + class PluggableSplitStorageBase(SplitStorage): """InMemory implementation of a feature flag storage.""" @@ -90,7 +328,7 @@ def update(self, to_add, to_delete, new_change_number): :param new_change_number: New change number. :type new_change_number: int """ -# pass + pass # try: # split = self.get(feature_flag_name) # if not split: diff --git a/splitio/storage/redis.py b/splitio/storage/redis.py index 982e0213..60b532e9 100644 --- a/splitio/storage/redis.py +++ b/splitio/storage/redis.py @@ -4,10 +4,10 @@ import threading from splitio.models.impressions import Impression -from splitio.models import splits, segments +from splitio.models import splits, segments, rule_based_segments from splitio.models.telemetry import TelemetryConfig, TelemetryConfigAsync from splitio.storage import SplitStorage, SegmentStorage, ImpressionStorage, EventStorage, \ - ImpressionPipelinedStorage, TelemetryStorage, FlagSetsFilter + ImpressionPipelinedStorage, TelemetryStorage, FlagSetsFilter, RuleBasedSegmentsStorage from splitio.storage.adapters.redis import RedisAdapterException from splitio.storage.adapters.cache_trait import decorate as add_cache, DEFAULT_MAX_AGE from splitio.storage.adapters.cache_trait import LocalMemoryCache, LocalMemoryCacheAsync @@ -16,8 +16,238 @@ _LOGGER = logging.getLogger(__name__) MAX_TAGS = 10 +class RedisRuleBasedSegmentsStorage(RuleBasedSegmentsStorage): + """Redis-based storage for rule based segments.""" + + _RB_SEGMENT_KEY = 'SPLITIO.rbsegment.${segmen_name}' + _RB_SEGMENT_TILL_KEY = 'SPLITIO.rbsegments.till' + + def 
__init__(self, redis_client): + """ + Class constructor. + + :param redis_client: Redis client or compliant interface. + :type redis_client: splitio.storage.adapters.redis.RedisAdapter + """ + self._redis = redis_client + self._pipe = self._redis.pipeline + + def _get_key(self, segment_name): + """ + Use the provided feature_flag_name to build the appropriate redis key. + + :param feature_flag_name: Name of the feature flag to interact with in redis. + :type feature_flag_name: str + + :return: Redis key. + :rtype: str. + """ + return self._RB_SEGMENT_KEY.format(segment_name=segment_name) + + def get(self, segment_name): + """ + Retrieve a rule based segment. + + :param segment_name: Name of the segment to fetch. + :type segment_name: str + + :rtype: str + """ + try: + raw = self._redis.get(self._get_key(segment_name)) + _LOGGER.debug("Fetchting rule based segment [%s] from redis" % segment_name) + _LOGGER.debug(raw) + return rule_based_segments.from_raw(json.loads(raw)) if raw is not None else None + + except RedisAdapterException: + _LOGGER.error('Error fetching rule based segment from storage') + _LOGGER.debug('Error: ', exc_info=True) + return None + + def update(self, to_add, to_delete, new_change_number): + """ + Update rule based segment.. + + :param to_add: List of rule based segment. to add + :type to_add: list[splitio.models.rule_based_segments.RuleBasedSegment] + :param to_delete: List of rule based segment. to delete + :type to_delete: list[splitio.models.rule_based_segments.RuleBasedSegment] + :param new_change_number: New change number. + :type new_change_number: int + """ + raise NotImplementedError('Only redis-consumer mode is supported.') + + def get_change_number(self): + """ + Retrieve latest rule based segment change number. 
+ + :rtype: int + """ + try: + stored_value = self._redis.get(self._RB_SEGMENT_TILL_KEY) + _LOGGER.debug("Fetching rule based segment Change Number from redis: %s" % stored_value) + return json.loads(stored_value) if stored_value is not None else None + + except RedisAdapterException: + _LOGGER.error('Error fetching rule based segment change number from storage') + _LOGGER.debug('Error: ', exc_info=True) + return None + + def contains(self, segment_names): + """ + Return whether the segments exists in rule based segment in cache. + + :param segment_names: segment name to validate. + :type segment_names: str + + :return: True if segment names exists. False otherwise. + :rtype: bool + """ + return set(segment_names).issubset(self.get_segment_names()) + + def get_segment_names(self): + """ + Retrieve a list of all rule based segments names. + + :return: List of segment names. + :rtype: list(str) + """ + try: + keys = self._redis.keys(self._get_key('*')) + _LOGGER.debug("Fetchting rule based segments names from redis: %s" % keys) + return [key.replace(self._get_key(''), '') for key in keys] + + except RedisAdapterException: + _LOGGER.error('Error fetching rule based segments names from storage') + _LOGGER.debug('Error: ', exc_info=True) + return [] + + def get_large_segment_names(self): + """ + Retrieve a list of all excluded large segments names. + + :return: List of segment names. + :rtype: list(str) + """ + pass + +class RedisRuleBasedSegmentsStorageAsync(RuleBasedSegmentsStorage): + """Redis-based storage for rule based segments.""" + + _RB_SEGMENT_KEY = 'SPLITIO.rbsegment.${segmen_name}' + _RB_SEGMENT_TILL_KEY = 'SPLITIO.rbsegments.till' + + def __init__(self, redis_client): + """ + Class constructor. + + :param redis_client: Redis client or compliant interface. 
+ :type redis_client: splitio.storage.adapters.redis.RedisAdapter + """ + self._redis = redis_client + self._pipe = self._redis.pipeline + + def _get_key(self, segment_name): + """ + Use the provided feature_flag_name to build the appropriate redis key. + + :param feature_flag_name: Name of the feature flag to interact with in redis. + :type feature_flag_name: str + + :return: Redis key. + :rtype: str. + """ + return self._RB_SEGMENT_KEY.format(segment_name=segment_name) + + async def get(self, segment_name): + """ + Retrieve a rule based segment. + + :param segment_name: Name of the segment to fetch. + :type segment_name: str + + :rtype: str + """ + try: + raw = await self._redis.get(self._get_key(segment_name)) + _LOGGER.debug("Fetchting rule based segment [%s] from redis" % segment_name) + _LOGGER.debug(raw) + return rule_based_segments.from_raw(json.loads(raw)) if raw is not None else None + + except RedisAdapterException: + _LOGGER.error('Error fetching rule based segment from storage') + _LOGGER.debug('Error: ', exc_info=True) + return None + + async def update(self, to_add, to_delete, new_change_number): + """ + Update rule based segment.. + + :param to_add: List of rule based segment. to add + :type to_add: list[splitio.models.rule_based_segments.RuleBasedSegment] + :param to_delete: List of rule based segment. to delete + :type to_delete: list[splitio.models.rule_based_segments.RuleBasedSegment] + :param new_change_number: New change number. + :type new_change_number: int + """ + raise NotImplementedError('Only redis-consumer mode is supported.') + + async def get_change_number(self): + """ + Retrieve latest rule based segment change number. 
+ + :rtype: int + """ + try: + stored_value = await self._redis.get(self._RB_SEGMENT_TILL_KEY) + _LOGGER.debug("Fetching rule based segment Change Number from redis: %s" % stored_value) + return json.loads(stored_value) if stored_value is not None else None + + except RedisAdapterException: + _LOGGER.error('Error fetching rule based segment change number from storage') + _LOGGER.debug('Error: ', exc_info=True) + return None + + async def contains(self, segment_names): + """ + Return whether the segments exists in rule based segment in cache. + + :param segment_names: segment name to validate. + :type segment_names: str + + :return: True if segment names exists. False otherwise. + :rtype: bool + """ + return set(segment_names).issubset(await self.get_segment_names()) + + async def get_segment_names(self): + """ + Retrieve a list of all rule based segments names. + + :return: List of segment names. + :rtype: list(str) + """ + try: + keys = await self._redis.keys(self._get_key('*')) + _LOGGER.debug("Fetchting rule based segments names from redis: %s" % keys) + return [key.replace(self._get_key(''), '') for key in keys] + + except RedisAdapterException: + _LOGGER.error('Error fetching rule based segments names from storage') + _LOGGER.debug('Error: ', exc_info=True) + return [] + + async def get_large_segment_names(self): + """ + Retrieve a list of all excluded large segments names. + + :return: List of segment names. 
+ :rtype: list(str) + """ + pass + class RedisSplitStorageBase(SplitStorage): - """Redis-based storage base for s.""" + """Redis-based storage base for feature flags.""" _FEATURE_FLAG_KEY = 'SPLITIO.split.{feature_flag_name}' _FEATURE_FLAG_TILL_KEY = 'SPLITIO.splits.till' diff --git a/splitio/sync/split.py b/splitio/sync/split.py index d0e4690c..4d4d5a5a 100644 --- a/splitio/sync/split.py +++ b/splitio/sync/split.py @@ -456,10 +456,10 @@ def _make_whitelist_condition(whitelist, treatment): 'combiner': 'AND' } } - - def _sanitize_feature_flag(self, parsed): + + def _sanitize_json_elements(self, parsed): """ - implement Sanitization if neded. + Sanitize all json elements. :param parsed: feature flags, till and since elements dict :type parsed: Dict @@ -467,14 +467,14 @@ def _sanitize_feature_flag(self, parsed): :return: sanitized structure dict :rtype: Dict """ - parsed = self._sanitize_json_elements(parsed) - parsed['splits'] = self._sanitize_feature_flag_elements(parsed['splits']) - + parsed = self._satitize_json_section(parsed, 'ff') + parsed = self._satitize_json_section(parsed, 'rbs') + return parsed - def _sanitize_json_elements(self, parsed): + def _satitize_json_section(self, parsed, section_name): """ - Sanitize all json elements. + Sanitize specific json section. 
:param parsed: feature flags, till and since elements dict :type parsed: Dict @@ -482,15 +482,17 @@ def _sanitize_json_elements(self, parsed): :return: sanitized structure dict :rtype: Dict """ - if 'splits' not in parsed: - parsed['splits'] = [] - if 'till' not in parsed or parsed['till'] is None or parsed['till'] < -1: - parsed['till'] = -1 - if 'since' not in parsed or parsed['since'] is None or parsed['since'] < -1 or parsed['since'] > parsed['till']: - parsed['since'] = parsed['till'] + if section_name not in parsed: + parsed['ff'] = {"t": -1, "s": -1, "d": []} + if 'd' not in parsed[section_name]: + parsed[section_name]['d'] = [] + if 't' not in parsed[section_name] or parsed[section_name]['t'] is None or parsed[section_name]['t'] < -1: + parsed[section_name]['t'] = -1 + if 's' not in parsed[section_name] or parsed[section_name]['s'] is None or parsed[section_name]['s'] < -1 or parsed[section_name]['s'] > parsed[section_name]['t']: + parsed[section_name]['s'] = parsed[section_name]['t'] return parsed - + def _sanitize_feature_flag_elements(self, parsed_feature_flags): """ Sanitize all feature flags elements. @@ -523,6 +525,29 @@ def _sanitize_feature_flag_elements(self, parsed_feature_flags): sanitized_feature_flags.append(feature_flag) return sanitized_feature_flags + def _sanitize_rb_segment_elements(self, parsed_rb_segments): + """ + Sanitize all rule based segments elements. 
+ + :param parsed_rb_segments: rule based segments array + :type parsed_rb_segments: [Dict] + + :return: sanitized structure dict + :rtype: [Dict] + """ + sanitized_rb_segments = [] + for rb_segment in parsed_rb_segments: + if 'name' not in rb_segment or rb_segment['name'].strip() == '': + _LOGGER.warning("A rule based segment in json file does not have (Name) or property is empty, skipping.") + continue + for element in [('trafficTypeName', 'user', None, None, None, None), + ('status', 'ACTIVE', None, None, ['ACTIVE', 'ARCHIVED'], None), + ('changeNumber', 0, 0, None, None, None)]: + rb_segment = util._sanitize_object_element(rb_segment, 'rule based segment', element[0], element[1], lower_value=element[2], upper_value=element[3], in_list=element[4], not_in_list=element[5]) + rb_segment = self._sanitize_condition(rb_segment) + sanitized_rb_segments.append(rb_segment) + return sanitized_rb_segments + def _sanitize_condition(self, feature_flag): """ Sanitize feature flag and ensure a condition type ROLLOUT and matcher exist with ALL_KEYS elements. @@ -601,7 +626,7 @@ def _convert_yaml_to_feature_flag(cls, parsed): class LocalSplitSynchronizer(LocalSplitSynchronizerBase): """Localhost mode feature_flag synchronizer.""" - def __init__(self, filename, feature_flag_storage, localhost_mode=LocalhostMode.LEGACY): + def __init__(self, filename, feature_flag_storage, rule_based_segment_storage, localhost_mode=LocalhostMode.LEGACY): """ Class constructor. @@ -614,6 +639,7 @@ def __init__(self, filename, feature_flag_storage, localhost_mode=LocalhostMode. 
""" self._filename = filename self._feature_flag_storage = feature_flag_storage + self._rule_based_segment_storage = rule_based_segment_storage self._localhost_mode = localhost_mode self._current_json_sha = "-1" @@ -706,18 +732,23 @@ def _synchronize_json(self): :rtype: [str] """ try: - fetched, till = self._read_feature_flags_from_json_file(self._filename) + parsed = self._read_feature_flags_from_json_file(self._filename) segment_list = set() - fecthed_sha = util._get_sha(json.dumps(fetched)) + fecthed_sha = util._get_sha(json.dumps(parsed)) if fecthed_sha == self._current_json_sha: return [] self._current_json_sha = fecthed_sha - if self._feature_flag_storage.get_change_number() > till and till != self._DEFAULT_FEATURE_FLAG_TILL: + if self._feature_flag_storage.get_change_number() > parsed['ff']['t'] and parsed['ff']['t'] != self._DEFAULT_FEATURE_FLAG_TILL: return [] - fetched_feature_flags = [splits.from_raw(feature_flag) for feature_flag in fetched] - segment_list = update_feature_flag_storage(self._feature_flag_storage, fetched_feature_flags, till) + fetched_feature_flags = [splits.from_raw(feature_flag) for feature_flag in parsed['ff']['d']] + segment_list = update_feature_flag_storage(self._feature_flag_storage, fetched_feature_flags, parsed['ff']['t']) + + if self._rule_based_segment_storage.get_change_number() <= parsed['rbs']['t'] or parsed['rbs']['t'] == self._DEFAULT_FEATURE_FLAG_TILL: + fetched_rb_segments = [rule_based_segments.from_raw(rb_segment) for rb_segment in parsed['rbs']['d']] + segment_list.update(update_rule_based_segment_storage(self._rule_based_segment_storage, fetched_rb_segments, parsed['rbs']['t'])) + return segment_list except Exception as exc: @@ -737,8 +768,11 @@ def _read_feature_flags_from_json_file(self, filename): try: with open(filename, 'r') as flo: parsed = json.load(flo) - santitized = self._sanitize_feature_flag(parsed) - return santitized['splits'], santitized['till'] + santitized = self._sanitize_json_elements(parsed) + 
santitized['ff'] = self._sanitize_feature_flag_elements(santitized['ff']) + santitized['rbs'] = self._sanitize_rb_segment_elements(santitized['rbs']) + return santitized + except Exception as exc: _LOGGER.debug('Exception: ', exc_info=True) raise ValueError("Error parsing file %s. Make sure it's readable." % filename) from exc @@ -747,7 +781,7 @@ def _read_feature_flags_from_json_file(self, filename): class LocalSplitSynchronizerAsync(LocalSplitSynchronizerBase): """Localhost mode async feature_flag synchronizer.""" - def __init__(self, filename, feature_flag_storage, localhost_mode=LocalhostMode.LEGACY): + def __init__(self, filename, feature_flag_storage, rule_based_segment_storage, localhost_mode=LocalhostMode.LEGACY): """ Class constructor. @@ -760,6 +794,7 @@ def __init__(self, filename, feature_flag_storage, localhost_mode=LocalhostMode. """ self._filename = filename self._feature_flag_storage = feature_flag_storage + self._rule_based_segment_storage = rule_based_segment_storage self._localhost_mode = localhost_mode self._current_json_sha = "-1" @@ -853,18 +888,23 @@ async def _synchronize_json(self): :rtype: [str] """ try: - fetched, till = await self._read_feature_flags_from_json_file(self._filename) + parsed = await self._read_feature_flags_from_json_file(self._filename) segment_list = set() - fecthed_sha = util._get_sha(json.dumps(fetched)) + fecthed_sha = util._get_sha(json.dumps(parsed)) if fecthed_sha == self._current_json_sha: return [] self._current_json_sha = fecthed_sha - if await self._feature_flag_storage.get_change_number() > till and till != self._DEFAULT_FEATURE_FLAG_TILL: + if await self._feature_flag_storage.get_change_number() > parsed['ff']['t'] and parsed['ff']['t'] != self._DEFAULT_FEATURE_FLAG_TILL: return [] - fetched_feature_flags = [splits.from_raw(feature_flag) for feature_flag in fetched] - segment_list = await update_feature_flag_storage_async(self._feature_flag_storage, fetched_feature_flags, till) + fetched_feature_flags = 
[splits.from_raw(feature_flag) for feature_flag in parsed['ff']['d']] + segment_list = await update_feature_flag_storage_async(self._feature_flag_storage, fetched_feature_flags, parsed['ff']['t']) + + if await self._rule_based_segment_storage.get_change_number() <= parsed['rbs']['t'] or parsed['rbs']['t'] == self._DEFAULT_FEATURE_FLAG_TILL: + fetched_rb_segments = [rule_based_segments.from_raw(rb_segment) for rb_segment in parsed['rbs']['d']] + segment_list.update(await update_rule_based_segment_storage(self._rule_based_segment_storage, fetched_rb_segments, parsed['rbs']['t'])) + return segment_list except Exception as exc: @@ -884,8 +924,10 @@ async def _read_feature_flags_from_json_file(self, filename): try: async with aiofiles.open(filename, 'r') as flo: parsed = json.loads(await flo.read()) - santitized = self._sanitize_feature_flag(parsed) - return santitized['splits'], santitized['till'] + santitized = self._sanitize_json_elements(parsed) + santitized['ff'] = self._sanitize_feature_flag_elements(santitized['ff']) + santitized['rbs'] = self._sanitize_rb_segment_elements(santitized['rbs']) + return santitized except Exception as exc: _LOGGER.debug('Exception: ', exc_info=True) raise ValueError("Error parsing file %s. Make sure it's readable." 
% filename) from exc From 4d8327c84cdb0036899fa7f1639a7980b79ae7de Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Thu, 13 Mar 2025 15:05:04 -0300 Subject: [PATCH 14/56] Updated redis, pluggable and localjson storages --- splitio/models/rule_based_segments.py | 30 ++- splitio/storage/pluggable.py | 20 +- splitio/storage/redis.py | 4 +- splitio/sync/split.py | 20 +- tests/integration/__init__.py | 4 + tests/storage/test_pluggable.py | 128 +++++++++++- tests/storage/test_redis.py | 163 ++++++++++++++- tests/sync/test_splits_synchronizer.py | 274 +++++++++++++------------ tests/sync/test_synchronizer.py | 2 - 9 files changed, 482 insertions(+), 163 deletions(-) diff --git a/splitio/models/rule_based_segments.py b/splitio/models/rule_based_segments.py index 4ff548b2..66ec7ddf 100644 --- a/splitio/models/rule_based_segments.py +++ b/splitio/models/rule_based_segments.py @@ -11,14 +11,14 @@ class RuleBasedSegment(object): """RuleBasedSegment object class.""" - def __init__(self, name, traffic_yype_Name, change_number, status, conditions, excluded): + def __init__(self, name, traffic_type_name, change_number, status, conditions, excluded): """ Class constructor. :param name: Segment name. :type name: str - :param traffic_yype_Name: traffic type name. - :type traffic_yype_Name: str + :param traffic_type_name: traffic type name. + :type traffic_type_name: str :param change_number: change number. :type change_number: str :param status: status. 
@@ -29,7 +29,7 @@ def __init__(self, name, traffic_yype_Name, change_number, status, conditions, e :type excluded: Excluded """ self._name = name - self._traffic_yype_Name = traffic_yype_Name + self._traffic_type_name = traffic_type_name self._change_number = change_number self._status = status self._conditions = conditions @@ -41,9 +41,9 @@ def name(self): return self._name @property - def traffic_yype_Name(self): + def traffic_type_name(self): """Return traffic type name.""" - return self._traffic_yype_Name + return self._traffic_type_name @property def change_number(self): @@ -65,6 +65,17 @@ def excluded(self): """Return excluded.""" return self._excluded + def to_json(self): + """Return a JSON representation of this rule based segment.""" + return { + 'changeNumber': self.change_number, + 'trafficTypeName': self.traffic_type_name, + 'name': self.name, + 'status': self.status, + 'conditions': [c.to_json() for c in self.conditions], + 'excluded': self.excluded.to_json() + } + def from_raw(raw_rule_based_segment): """ Parse a Rule based segment from a JSON portion of splitChanges. 
@@ -111,3 +122,10 @@ def get_excluded_keys(self): def get_excluded_segments(self): """Return excluded segments""" return self._segments + + def to_json(self): + """Return a JSON representation of this object.""" + return { + 'keys': self._keys, + 'segments': self._segments + } diff --git a/splitio/storage/pluggable.py b/splitio/storage/pluggable.py index 1cb7e054..66fad1e5 100644 --- a/splitio/storage/pluggable.py +++ b/splitio/storage/pluggable.py @@ -17,7 +17,6 @@ class PluggableRuleBasedSegmentsStorageBase(RuleBasedSegmentsStorage): """RedPluggable storage for rule based segments.""" - _RB_SEGMENT_NAME_LENGTH = 23 _TILL_LENGTH = 4 def __init__(self, pluggable_adapter, prefix=None): @@ -28,9 +27,11 @@ def __init__(self, pluggable_adapter, prefix=None): :type redis_client: splitio.storage.adapters.redis.RedisAdapter """ self._pluggable_adapter = pluggable_adapter - self._prefix = "SPLITIO.rbsegment.${segmen_name}" + self._prefix = "SPLITIO.rbsegment.{segment_name}" self._rb_segments_till_prefix = "SPLITIO.rbsegments.till" + self._rb_segment_name_length = 18 if prefix is not None: + self._rb_segment_name_length += len(prefix) + 1 self._prefix = prefix + "." + self._prefix self._rb_segments_till_prefix = prefix + "." 
+ self._rb_segments_till_prefix @@ -163,10 +164,13 @@ def get_segment_names(self): :rtype: list(str) """ try: + _LOGGER.error(self._rb_segment_name_length) + _LOGGER.error(self._prefix) + _LOGGER.error(self._prefix[:self._rb_segment_name_length]) keys = [] - for key in self._pluggable_adapter.get_keys_by_prefix(self._prefix[:-self._RB_SEGMENT_NAME_LENGTH]): + for key in self._pluggable_adapter.get_keys_by_prefix(self._prefix[:self._rb_segment_name_length]): if key[-self._TILL_LENGTH:] != 'till': - keys.append(key[len(self._prefix[:-self._RB_SEGMENT_NAME_LENGTH]):]) + keys.append(key[len(self._prefix[:self._rb_segment_name_length]):]) return keys except Exception: @@ -174,7 +178,7 @@ def get_segment_names(self): _LOGGER.debug('Error: ', exc_info=True) return None -class PluggableRuleBasedSegmentsStorageAsync(RuleBasedSegmentsStorage): +class PluggableRuleBasedSegmentsStorageAsync(PluggableRuleBasedSegmentsStorageBase): """RedPluggable storage for rule based segments.""" def __init__(self, pluggable_adapter, prefix=None): @@ -231,7 +235,7 @@ async def contains(self, segment_names): :return: True if segment names exists. False otherwise. 
:rtype: bool """ - return await set(segment_names).issubset(self.get_segment_names()) + return set(segment_names).issubset(await self.get_segment_names()) async def get_segment_names(self): """ @@ -242,9 +246,9 @@ async def get_segment_names(self): """ try: keys = [] - for key in await self._pluggable_adapter.get_keys_by_prefix(self._prefix[:-self._RB_SEGMENT_NAME_LENGTH]): + for key in await self._pluggable_adapter.get_keys_by_prefix(self._prefix[:self._rb_segment_name_length]): if key[-self._TILL_LENGTH:] != 'till': - keys.append(key[len(self._prefix[:-self._RB_SEGMENT_NAME_LENGTH]):]) + keys.append(key[len(self._prefix[:self._rb_segment_name_length]):]) return keys except Exception: diff --git a/splitio/storage/redis.py b/splitio/storage/redis.py index 60b532e9..e5398cf7 100644 --- a/splitio/storage/redis.py +++ b/splitio/storage/redis.py @@ -19,7 +19,7 @@ class RedisRuleBasedSegmentsStorage(RuleBasedSegmentsStorage): """Redis-based storage for rule based segments.""" - _RB_SEGMENT_KEY = 'SPLITIO.rbsegment.${segmen_name}' + _RB_SEGMENT_KEY = 'SPLITIO.rbsegment.{segment_name}' _RB_SEGMENT_TILL_KEY = 'SPLITIO.rbsegments.till' def __init__(self, redis_client): @@ -134,7 +134,7 @@ def get_large_segment_names(self): class RedisRuleBasedSegmentsStorageAsync(RuleBasedSegmentsStorage): """Redis-based storage for rule based segments.""" - _RB_SEGMENT_KEY = 'SPLITIO.rbsegment.${segmen_name}' + _RB_SEGMENT_KEY = 'SPLITIO.rbsegment.{segment_name}' _RB_SEGMENT_TILL_KEY = 'SPLITIO.rbsegments.till' def __init__(self, redis_client): diff --git a/splitio/sync/split.py b/splitio/sync/split.py index 4d4d5a5a..58ea900a 100644 --- a/splitio/sync/split.py +++ b/splitio/sync/split.py @@ -545,6 +545,7 @@ def _sanitize_rb_segment_elements(self, parsed_rb_segments): ('changeNumber', 0, 0, None, None, None)]: rb_segment = util._sanitize_object_element(rb_segment, 'rule based segment', element[0], element[1], lower_value=element[2], upper_value=element[3], in_list=element[4], 
not_in_list=element[5]) rb_segment = self._sanitize_condition(rb_segment) + rb_segment = self._remove_partition(rb_segment) sanitized_rb_segments.append(rb_segment) return sanitized_rb_segments @@ -599,6 +600,15 @@ def _sanitize_condition(self, feature_flag): }) return feature_flag + + def _remove_partition(self, rb_segment): + sanitized = [] + for condition in rb_segment['conditions']: + if 'partition' in condition: + del condition['partition'] + sanitized.append(condition) + rb_segment['conditions'] = sanitized + return rb_segment @classmethod def _convert_yaml_to_feature_flag(cls, parsed): @@ -769,8 +779,8 @@ def _read_feature_flags_from_json_file(self, filename): with open(filename, 'r') as flo: parsed = json.load(flo) santitized = self._sanitize_json_elements(parsed) - santitized['ff'] = self._sanitize_feature_flag_elements(santitized['ff']) - santitized['rbs'] = self._sanitize_rb_segment_elements(santitized['rbs']) + santitized['ff']['d'] = self._sanitize_feature_flag_elements(santitized['ff']['d']) + santitized['rbs']['d'] = self._sanitize_rb_segment_elements(santitized['rbs']['d']) return santitized except Exception as exc: @@ -903,7 +913,7 @@ async def _synchronize_json(self): if await self._rule_based_segment_storage.get_change_number() <= parsed['rbs']['t'] or parsed['rbs']['t'] == self._DEFAULT_FEATURE_FLAG_TILL: fetched_rb_segments = [rule_based_segments.from_raw(rb_segment) for rb_segment in parsed['rbs']['d']] - segment_list.update(await update_rule_based_segment_storage(self._rule_based_segment_storage, fetched_rb_segments, parsed['rbs']['t'])) + segment_list.update(await update_rule_based_segment_storage_async(self._rule_based_segment_storage, fetched_rb_segments, parsed['rbs']['t'])) return segment_list @@ -925,8 +935,8 @@ async def _read_feature_flags_from_json_file(self, filename): async with aiofiles.open(filename, 'r') as flo: parsed = json.loads(await flo.read()) santitized = self._sanitize_json_elements(parsed) - santitized['ff'] = 
self._sanitize_feature_flag_elements(santitized['ff']) - santitized['rbs'] = self._sanitize_rb_segment_elements(santitized['rbs']) + santitized['ff']['d'] = self._sanitize_feature_flag_elements(santitized['ff']['d']) + santitized['rbs']['d'] = self._sanitize_rb_segment_elements(santitized['rbs']['d']) return santitized except Exception as exc: _LOGGER.debug('Exception: ', exc_info=True) diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py index ee2475df..ab6e3293 100644 --- a/tests/integration/__init__.py +++ b/tests/integration/__init__.py @@ -47,3 +47,7 @@ "splitChange6_2": split62, "splitChange6_3": split63, } + +rbsegments_json = { + "segment1": {"changeNumber": 12, "name": "some_segment", "status": "ACTIVE","trafficTypeName": "user","excluded":{"keys":[],"segments":[]},"conditions": []} +} \ No newline at end of file diff --git a/tests/storage/test_pluggable.py b/tests/storage/test_pluggable.py index 439049e5..953a4510 100644 --- a/tests/storage/test_pluggable.py +++ b/tests/storage/test_pluggable.py @@ -1,20 +1,21 @@ """Pluggable storage test module.""" import json import threading +import copy import pytest from splitio.optional.loaders import asyncio from splitio.models.splits import Split -from splitio.models import splits, segments +from splitio.models import splits, segments, rule_based_segments from splitio.models.segments import Segment from splitio.models.impressions import Impression from splitio.models.events import Event, EventWrapper from splitio.storage.pluggable import PluggableSplitStorage, PluggableSegmentStorage, PluggableImpressionsStorage, PluggableEventsStorage, \ PluggableTelemetryStorage, PluggableEventsStorageAsync, PluggableSegmentStorageAsync, PluggableImpressionsStorageAsync,\ - PluggableSplitStorageAsync, PluggableTelemetryStorageAsync + PluggableSplitStorageAsync, PluggableTelemetryStorageAsync, PluggableRuleBasedSegmentsStorage, PluggableRuleBasedSegmentsStorageAsync from splitio.client.util import 
get_metadata, SdkMetadata from splitio.models.telemetry import MAX_TAGS, MethodExceptionsAndLatencies, OperationMode -from tests.integration import splits_json +from tests.integration import splits_json, rbsegments_json class StorageMockAdapter(object): def __init__(self): @@ -1372,3 +1373,124 @@ async def test_push_config_stats(self): await pluggable_telemetry_storage.record_active_and_redundant_factories(2, 1) await pluggable_telemetry_storage.push_config_stats() assert(self.mock_adapter._keys[pluggable_telemetry_storage._telemetry_config_key + "::" + pluggable_telemetry_storage._sdk_metadata] == '{"aF": 2, "rF": 1, "sT": "memory", "oM": 0, "t": []}') + +class PluggableRuleBasedSegmentStorageTests(object): + """In memory rule based segment storage test cases.""" + + def setup_method(self): + """Prepare storages with test data.""" + self.mock_adapter = StorageMockAdapter() + + def test_get(self): + self.mock_adapter._keys = {} + for sprefix in [None, 'myprefix']: + pluggable_rbs_storage = PluggableRuleBasedSegmentsStorage(self.mock_adapter, prefix=sprefix) + + rbs1 = rule_based_segments.from_raw(rbsegments_json['segment1']) + rbs_name = rbsegments_json['segment1']['name'] + + self.mock_adapter.set(pluggable_rbs_storage._prefix.format(segment_name=rbs_name), rbs1.to_json()) + assert(pluggable_rbs_storage.get(rbs_name).to_json() == rule_based_segments.from_raw(rbsegments_json['segment1']).to_json()) + assert(pluggable_rbs_storage.get('not_existing') == None) + + def test_get_change_number(self): + self.mock_adapter._keys = {} + for sprefix in [None, 'myprefix']: + pluggable_rbs_storage = PluggableRuleBasedSegmentsStorage(self.mock_adapter, prefix=sprefix) + if sprefix == 'myprefix': + prefix = 'myprefix.' 
+ else: + prefix = '' + self.mock_adapter.set(prefix + "SPLITIO.rbsegments.till", 1234) + assert(pluggable_rbs_storage.get_change_number() == 1234) + + def test_get_segment_names(self): + self.mock_adapter._keys = {} + for sprefix in [None, 'myprefix']: + pluggable_rbs_storage = PluggableRuleBasedSegmentsStorage(self.mock_adapter, prefix=sprefix) + rbs1 = rule_based_segments.from_raw(rbsegments_json['segment1']) + rbs2_temp = copy.deepcopy(rbsegments_json['segment1']) + rbs2_temp['name'] = 'another_segment' + rbs2 = rule_based_segments.from_raw(rbs2_temp) + self.mock_adapter.set(pluggable_rbs_storage._prefix.format(segment_name=rbs1.name), rbs1.to_json()) + self.mock_adapter.set(pluggable_rbs_storage._prefix.format(segment_name=rbs2.name), rbs2.to_json()) + assert(pluggable_rbs_storage.get_segment_names() == [rbs1.name, rbs2.name]) + + def test_contains(self): + self.mock_adapter._keys = {} + for sprefix in [None, 'myprefix']: + pluggable_rbs_storage = PluggableRuleBasedSegmentsStorage(self.mock_adapter, prefix=sprefix) + rbs1 = rule_based_segments.from_raw(rbsegments_json['segment1']) + rbs2_temp = copy.deepcopy(rbsegments_json['segment1']) + rbs2_temp['name'] = 'another_segment' + rbs2 = rule_based_segments.from_raw(rbs2_temp) + self.mock_adapter.set(pluggable_rbs_storage._prefix.format(segment_name=rbs1.name), rbs1.to_json()) + self.mock_adapter.set(pluggable_rbs_storage._prefix.format(segment_name=rbs2.name), rbs2.to_json()) + + assert(pluggable_rbs_storage.contains([rbs1.name, rbs2.name])) + assert(pluggable_rbs_storage.contains([rbs2.name])) + assert(not pluggable_rbs_storage.contains(['none-exists', rbs2.name])) + assert(not pluggable_rbs_storage.contains(['none-exists', 'none-exists2'])) + +class PluggableRuleBasedSegmentStorageAsyncTests(object): + """In memory rule based segment storage test cases.""" + + def setup_method(self): + """Prepare storages with test data.""" + self.mock_adapter = StorageMockAdapterAsync() + + @pytest.mark.asyncio + async def 
test_get(self): + self.mock_adapter._keys = {} + for sprefix in [None, 'myprefix']: + pluggable_rbs_storage = PluggableRuleBasedSegmentsStorageAsync(self.mock_adapter, prefix=sprefix) + + rbs1 = rule_based_segments.from_raw(rbsegments_json['segment1']) + rbs_name = rbsegments_json['segment1']['name'] + + await self.mock_adapter.set(pluggable_rbs_storage._prefix.format(segment_name=rbs_name), rbs1.to_json()) + rbs = await pluggable_rbs_storage.get(rbs_name) + assert(rbs.to_json() == rule_based_segments.from_raw(rbsegments_json['segment1']).to_json()) + assert(await pluggable_rbs_storage.get('not_existing') == None) + + @pytest.mark.asyncio + async def test_get_change_number(self): + self.mock_adapter._keys = {} + for sprefix in [None, 'myprefix']: + pluggable_rbs_storage = PluggableRuleBasedSegmentsStorageAsync(self.mock_adapter, prefix=sprefix) + if sprefix == 'myprefix': + prefix = 'myprefix.' + else: + prefix = '' + await self.mock_adapter.set(prefix + "SPLITIO.rbsegments.till", 1234) + assert(await pluggable_rbs_storage.get_change_number() == 1234) + + @pytest.mark.asyncio + async def test_get_segment_names(self): + self.mock_adapter._keys = {} + for sprefix in [None, 'myprefix']: + pluggable_rbs_storage = PluggableRuleBasedSegmentsStorageAsync(self.mock_adapter, prefix=sprefix) + rbs1 = rule_based_segments.from_raw(rbsegments_json['segment1']) + rbs2_temp = copy.deepcopy(rbsegments_json['segment1']) + rbs2_temp['name'] = 'another_segment' + rbs2 = rule_based_segments.from_raw(rbs2_temp) + await self.mock_adapter.set(pluggable_rbs_storage._prefix.format(segment_name=rbs1.name), rbs1.to_json()) + await self.mock_adapter.set(pluggable_rbs_storage._prefix.format(segment_name=rbs2.name), rbs2.to_json()) + assert(await pluggable_rbs_storage.get_segment_names() == [rbs1.name, rbs2.name]) + + @pytest.mark.asyncio + async def test_contains(self): + self.mock_adapter._keys = {} + for sprefix in [None, 'myprefix']: + pluggable_rbs_storage = 
PluggableRuleBasedSegmentsStorageAsync(self.mock_adapter, prefix=sprefix) + rbs1 = rule_based_segments.from_raw(rbsegments_json['segment1']) + rbs2_temp = copy.deepcopy(rbsegments_json['segment1']) + rbs2_temp['name'] = 'another_segment' + rbs2 = rule_based_segments.from_raw(rbs2_temp) + await self.mock_adapter.set(pluggable_rbs_storage._prefix.format(segment_name=rbs1.name), rbs1.to_json()) + await self.mock_adapter.set(pluggable_rbs_storage._prefix.format(segment_name=rbs2.name), rbs2.to_json()) + + assert(await pluggable_rbs_storage.contains([rbs1.name, rbs2.name])) + assert(await pluggable_rbs_storage.contains([rbs2.name])) + assert(not await pluggable_rbs_storage.contains(['none-exists', rbs2.name])) + assert(not await pluggable_rbs_storage.contains(['none-exists', 'none-exists2'])) diff --git a/tests/storage/test_redis.py b/tests/storage/test_redis.py index cce9a43d..04ddfc60 100644 --- a/tests/storage/test_redis.py +++ b/tests/storage/test_redis.py @@ -12,7 +12,8 @@ from splitio.optional.loaders import asyncio from splitio.storage import FlagSetsFilter from splitio.storage.redis import RedisEventsStorage, RedisEventsStorageAsync, RedisImpressionsStorage, RedisImpressionsStorageAsync, \ - RedisSegmentStorage, RedisSegmentStorageAsync, RedisSplitStorage, RedisSplitStorageAsync, RedisTelemetryStorage, RedisTelemetryStorageAsync + RedisSegmentStorage, RedisSegmentStorageAsync, RedisSplitStorage, RedisSplitStorageAsync, RedisTelemetryStorage, RedisTelemetryStorageAsync, \ + RedisRuleBasedSegmentsStorage, RedisRuleBasedSegmentsStorageAsync from splitio.storage.adapters.redis import RedisAdapter, RedisAdapterException, build from redis.asyncio.client import Redis as aioredis from splitio.storage.adapters import redis @@ -1230,3 +1231,163 @@ async def expire(*args): await redis_telemetry.expire_keys('key', 12, 2, 2) assert(self.called) + +class RedisRuleBasedSegmentStorageTests(object): + """Redis rule based segment storage test cases.""" + + def 
test_get_segment(self, mocker): + """Test retrieving a rule based segment works.""" + adapter = mocker.Mock(spec=RedisAdapter) + adapter.get.return_value = '{"name": "some_segment"}' + from_raw = mocker.Mock() + mocker.patch('splitio.storage.redis.rule_based_segments.from_raw', new=from_raw) + + storage = RedisRuleBasedSegmentsStorage(adapter) + storage.get('some_segment') + + assert adapter.get.mock_calls == [mocker.call('SPLITIO.rbsegment.some_segment')] + assert from_raw.mock_calls == [mocker.call({"name": "some_segment"})] + + # Test that a missing split returns None and doesn't call from_raw + adapter.reset_mock() + from_raw.reset_mock() + adapter.get.return_value = None + result = storage.get('some_segment') + assert result is None + assert adapter.get.mock_calls == [mocker.call('SPLITIO.rbsegment.some_segment')] + assert not from_raw.mock_calls + + def test_get_changenumber(self, mocker): + """Test fetching changenumber.""" + adapter = mocker.Mock(spec=RedisAdapter) + storage = RedisRuleBasedSegmentsStorage(adapter) + adapter.get.return_value = '-1' + assert storage.get_change_number() == -1 + assert adapter.get.mock_calls == [mocker.call('SPLITIO.rbsegments.till')] + + def test_get_segment_names(self, mocker): + """Test getching rule based segment names.""" + adapter = mocker.Mock(spec=RedisAdapter) + storage = RedisRuleBasedSegmentsStorage(adapter) + adapter.keys.return_value = [ + 'SPLITIO.rbsegment.segment1', + 'SPLITIO.rbsegment.segment2', + 'SPLITIO.rbsegment.segment3' + ] + assert storage.get_segment_names() == ['segment1', 'segment2', 'segment3'] + + def test_contains(self, mocker): + """Test storage containing rule based segment names.""" + adapter = mocker.Mock(spec=RedisAdapter) + storage = RedisRuleBasedSegmentsStorage(adapter) + adapter.keys.return_value = [ + 'SPLITIO.rbsegment.segment1', + 'SPLITIO.rbsegment.segment2', + 'SPLITIO.rbsegment.segment3' + ] + assert storage.contains(['segment1', 'segment3']) + assert not 
storage.contains(['segment1', 'segment4']) + assert storage.contains(['segment1']) + assert not storage.contains(['segment4', 'segment5']) + +class RedisRuleBasedSegmentStorageAsyncTests(object): + """Redis rule based segment storage test cases.""" + + @pytest.mark.asyncio + async def test_get_segment(self, mocker): + """Test retrieving a rule based segment works.""" + redis_mock = await aioredis.from_url("redis://localhost") + adapter = redis.RedisAdapterAsync(redis_mock, 'some_prefix') + + self.redis_ret = None + self.name = None + async def get(sel, name): + self.name = name + self.redis_ret = '{"changeNumber": "12", "name": "some_segment", "status": "ACTIVE","trafficTypeName": "user","excluded":{"keys":[],"segments":[]},"conditions": []}' + return self.redis_ret + mocker.patch('splitio.storage.adapters.redis.RedisAdapterAsync.get', new=get) + + storage = RedisRuleBasedSegmentsStorageAsync(adapter) + await storage.get('some_segment') + + assert self.name == 'SPLITIO.rbsegment.some_segment' + assert self.redis_ret == '{"changeNumber": "12", "name": "some_segment", "status": "ACTIVE","trafficTypeName": "user","excluded":{"keys":[],"segments":[]},"conditions": []}' + + # Test that a missing split returns None and doesn't call from_raw + + self.name = None + async def get2(sel, name): + self.name = name + return None + mocker.patch('splitio.storage.adapters.redis.RedisAdapterAsync.get', new=get2) + + result = await storage.get('some_segment') + assert result is None + assert self.name == 'SPLITIO.rbsegment.some_segment' + + # Test that a missing split returns None and doesn't call from_raw + result = await storage.get('some_segment2') + assert result is None + + @pytest.mark.asyncio + async def test_get_changenumber(self, mocker): + """Test fetching changenumber.""" + redis_mock = await aioredis.from_url("redis://localhost") + adapter = redis.RedisAdapterAsync(redis_mock, 'some_prefix') + storage = RedisRuleBasedSegmentsStorageAsync(adapter) + + self.redis_ret = 
None + self.name = None + async def get(sel, name): + self.name = name + self.redis_ret = '-1' + return self.redis_ret + mocker.patch('splitio.storage.adapters.redis.RedisAdapterAsync.get', new=get) + + assert await storage.get_change_number() == -1 + assert self.name == 'SPLITIO.rbsegments.till' + + @pytest.mark.asyncio + async def test_get_segment_names(self, mocker): + """Test getching rule based segment names.""" + redis_mock = await aioredis.from_url("redis://localhost") + adapter = redis.RedisAdapterAsync(redis_mock, 'some_prefix') + storage = RedisRuleBasedSegmentsStorageAsync(adapter) + + self.key = None + self.keys_ret = None + async def keys(sel, key): + self.key = key + self.keys_ret = [ + 'SPLITIO.rbsegment.segment1', + 'SPLITIO.rbsegment.segment2', + 'SPLITIO.rbsegment.segment3' + ] + return self.keys_ret + mocker.patch('splitio.storage.adapters.redis.RedisAdapterAsync.keys', new=keys) + + assert await storage.get_segment_names() == ['segment1', 'segment2', 'segment3'] + + @pytest.mark.asyncio + async def test_contains(self, mocker): + """Test storage containing rule based segment names.""" + redis_mock = await aioredis.from_url("redis://localhost") + adapter = redis.RedisAdapterAsync(redis_mock, 'some_prefix') + storage = RedisRuleBasedSegmentsStorageAsync(adapter) + + self.key = None + self.keys_ret = None + async def keys(sel, key): + self.key = key + self.keys_ret = [ + 'SPLITIO.rbsegment.segment1', + 'SPLITIO.rbsegment.segment2', + 'SPLITIO.rbsegment.segment3' + ] + return self.keys_ret + mocker.patch('splitio.storage.adapters.redis.RedisAdapterAsync.keys', new=keys) + + assert await storage.contains(['segment1', 'segment3']) + assert not await storage.contains(['segment1', 'segment4']) + assert await storage.contains(['segment1']) + assert not await storage.contains(['segment4', 'segment5']) diff --git a/tests/sync/test_splits_synchronizer.py b/tests/sync/test_splits_synchronizer.py index 2acf293f..ce1ade7e 100644 --- 
a/tests/sync/test_splits_synchronizer.py +++ b/tests/sync/test_splits_synchronizer.py @@ -8,14 +8,14 @@ from splitio.util.backoff import Backoff from splitio.api import APIException from splitio.api.commons import FetchOptions -from splitio.storage import SplitStorage +from splitio.storage import SplitStorage, RuleBasedSegmentsStorage from splitio.storage.inmemmory import InMemorySplitStorage, InMemorySplitStorageAsync, InMemoryRuleBasedSegmentStorage, InMemoryRuleBasedSegmentStorageAsync from splitio.storage import FlagSetsFilter from splitio.models.splits import Split from splitio.models.rule_based_segments import RuleBasedSegment from splitio.sync.split import SplitSynchronizer, SplitSynchronizerAsync, LocalSplitSynchronizer, LocalSplitSynchronizerAsync, LocalhostMode from splitio.optional.loaders import aiofiles, asyncio -from tests.integration import splits_json +from tests.integration import splits_json, rbsegments_json splits_raw = [{ 'changeNumber': 123, @@ -861,12 +861,13 @@ async def get_changes(*args, **kwargs): class LocalSplitsSynchronizerTests(object): """Split synchronizer test cases.""" - splits = copy.deepcopy(splits_raw) + payload = copy.deepcopy(json_body) def test_synchronize_splits_error(self, mocker): """Test that if fetching splits fails at some_point, the task will continue running.""" storage = mocker.Mock(spec=SplitStorage) - split_synchronizer = LocalSplitSynchronizer("/incorrect_file", storage) + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) + split_synchronizer = LocalSplitSynchronizer("/incorrect_file", storage, rbs_storage) with pytest.raises(Exception): split_synchronizer.synchronize_splits(1) @@ -874,74 +875,75 @@ def test_synchronize_splits_error(self, mocker): def test_synchronize_splits(self, mocker): """Test split sync.""" storage = InMemorySplitStorage() + rbs_storage = InMemoryRuleBasedSegmentStorage() - till = 123 def read_splits_from_json_file(*args, **kwargs): - return self.splits, till + return self.payload - 
split_synchronizer = LocalSplitSynchronizer("split.json", storage, LocalhostMode.JSON) + split_synchronizer = LocalSplitSynchronizer("split.json", storage, rbs_storage, LocalhostMode.JSON) split_synchronizer._read_feature_flags_from_json_file = read_splits_from_json_file split_synchronizer.synchronize_splits() - inserted_split = storage.get(self.splits[0]['name']) + inserted_split = storage.get(self.payload["ff"]["d"][0]['name']) assert isinstance(inserted_split, Split) assert inserted_split.name == 'some_name' # Should sync when changenumber is not changed - self.splits[0]['killed'] = True + self.payload["ff"]["d"][0]['killed'] = True split_synchronizer.synchronize_splits() - inserted_split = storage.get(self.splits[0]['name']) + inserted_split = storage.get(self.payload["ff"]["d"][0]['name']) assert inserted_split.killed # Should not sync when changenumber is less than stored - till = 122 - self.splits[0]['killed'] = False + self.payload["ff"]["t"] = 122 + self.payload["ff"]["d"][0]['killed'] = False split_synchronizer.synchronize_splits() - inserted_split = storage.get(self.splits[0]['name']) + inserted_split = storage.get(self.payload["ff"]["d"][0]['name']) assert inserted_split.killed # Should sync when changenumber is higher than stored - till = 124 + self.payload["ff"]["t"] = 1675095324999 split_synchronizer._current_json_sha = "-1" split_synchronizer.synchronize_splits() - inserted_split = storage.get(self.splits[0]['name']) + inserted_split = storage.get(self.payload["ff"]["d"][0]['name']) assert inserted_split.killed == False # Should sync when till is default (-1) - till = -1 + self.payload["ff"]["t"] = -1 split_synchronizer._current_json_sha = "-1" - self.splits[0]['killed'] = True + self.payload["ff"]["d"][0]['killed'] = True split_synchronizer.synchronize_splits() - inserted_split = storage.get(self.splits[0]['name']) + inserted_split = storage.get(self.payload["ff"]["d"][0]['name']) assert inserted_split.killed == True def 
test_sync_flag_sets_with_config_sets(self, mocker): """Test split sync with flag sets.""" storage = InMemorySplitStorage(['set1', 'set2']) - - split = self.splits[0].copy() + rbs_storage = InMemoryRuleBasedSegmentStorage() + + split = self.payload["ff"]["d"][0].copy() split['name'] = 'second' - splits1 = [self.splits[0].copy(), split] - splits2 = self.splits.copy() - splits3 = self.splits.copy() - splits4 = self.splits.copy() + splits1 = [self.payload["ff"]["d"][0].copy(), split] + splits2 = self.payload["ff"]["d"].copy() + splits3 = self.payload["ff"]["d"].copy() + splits4 = self.payload["ff"]["d"].copy() self.called = 0 def read_feature_flags_from_json_file(*args, **kwargs): self.called += 1 if self.called == 1: - return splits1, 123 + return {"ff": {"d": splits1, "t": 123, "s": -1}, "rbs": {"d": [], "t": -1, "s": -1}} elif self.called == 2: splits2[0]['sets'] = ['set3'] - return splits2, 124 + return {"ff": {"d": splits2, "t": 124, "s": -1}, "rbs": {"d": [], "t": -1, "s": -1}} elif self.called == 3: splits3[0]['sets'] = ['set1'] - return splits3, 12434 + return {"ff": {"d": splits3, "t": 12434, "s": -1}, "rbs": {"d": [], "t": -1, "s": -1}} splits4[0]['sets'] = ['set6'] splits4[0]['name'] = 'new_split' - return splits4, 12438 + return {"ff": {"d": splits4, "t": 12438, "s": -1}, "rbs": {"d": [], "t": -1, "s": -1}} - split_synchronizer = LocalSplitSynchronizer("split.json", storage, LocalhostMode.JSON) + split_synchronizer = LocalSplitSynchronizer("split.json", storage, rbs_storage, LocalhostMode.JSON) split_synchronizer._read_feature_flags_from_json_file = read_feature_flags_from_json_file split_synchronizer.synchronize_splits() @@ -959,30 +961,31 @@ def read_feature_flags_from_json_file(*args, **kwargs): def test_sync_flag_sets_without_config_sets(self, mocker): """Test split sync with flag sets.""" storage = InMemorySplitStorage() + rbs_storage = InMemoryRuleBasedSegmentStorage() - split = self.splits[0].copy() + split = self.payload["ff"]["d"][0].copy() 
split['name'] = 'second' - splits1 = [self.splits[0].copy(), split] - splits2 = self.splits.copy() - splits3 = self.splits.copy() - splits4 = self.splits.copy() + splits1 = [self.payload["ff"]["d"][0].copy(), split] + splits2 = self.payload["ff"]["d"].copy() + splits3 = self.payload["ff"]["d"].copy() + splits4 = self.payload["ff"]["d"].copy() self.called = 0 def read_feature_flags_from_json_file(*args, **kwargs): self.called += 1 if self.called == 1: - return splits1, 123 + return {"ff": {"d": splits1, "t": 123, "s": -1}, "rbs": {"d": [], "t": -1, "s": -1}} elif self.called == 2: splits2[0]['sets'] = ['set3'] - return splits2, 124 + return {"ff": {"d": splits2, "t": 124, "s": -1}, "rbs": {"d": [], "t": -1, "s": -1}} elif self.called == 3: splits3[0]['sets'] = ['set1'] - return splits3, 12434 + return {"ff": {"d": splits3, "t": 12434, "s": -1}, "rbs": {"d": [], "t": -1, "s": -1}} splits4[0]['sets'] = ['set6'] splits4[0]['name'] = 'third_split' - return splits4, 12438 + return {"ff": {"d": splits4, "t": 12438, "s": -1}, "rbs": {"d": [], "t": -1, "s": -1}} - split_synchronizer = LocalSplitSynchronizer("split.json", storage, LocalhostMode.JSON) + split_synchronizer = LocalSplitSynchronizer("split.json", storage, rbs_storage, LocalhostMode.JSON) split_synchronizer._read_feature_flags_from_json_file = read_feature_flags_from_json_file split_synchronizer.synchronize_splits() @@ -1000,95 +1003,73 @@ def read_feature_flags_from_json_file(*args, **kwargs): def test_reading_json(self, mocker): """Test reading json file.""" f = open("./splits.json", "w") - json_body = {'splits': [{ - 'changeNumber': 123, - 'trafficTypeName': 'user', - 'name': 'some_name', - 'trafficAllocation': 100, - 'trafficAllocationSeed': 123456, - 'seed': 321654, - 'status': 'ACTIVE', - 'killed': False, - 'defaultTreatment': 'off', - 'algo': 2, - 'conditions': [ - { - 'partitions': [ - {'treatment': 'on', 'size': 50}, - {'treatment': 'off', 'size': 50} - ], - 'contitionType': 'WHITELIST', - 'label': 
'some_label', - 'matcherGroup': { - 'matchers': [ - { - 'matcherType': 'WHITELIST', - 'whitelistMatcherData': { - 'whitelist': ['k1', 'k2', 'k3'] - }, - 'negate': False, - } - ], - 'combiner': 'AND' - } - } - ], - 'sets': ['set1'] - }], - "till":1675095324253, - "since":-1, - } - - f.write(json.dumps(json_body)) + f.write(json.dumps(self.payload)) f.close() storage = InMemorySplitStorage() - split_synchronizer = LocalSplitSynchronizer("./splits.json", storage, LocalhostMode.JSON) + rbs_storage = InMemoryRuleBasedSegmentStorage() + split_synchronizer = LocalSplitSynchronizer("./splits.json", storage, rbs_storage, LocalhostMode.JSON) split_synchronizer.synchronize_splits() - inserted_split = storage.get(json_body['splits'][0]['name']) + inserted_split = storage.get(self.payload['ff']['d'][0]['name']) assert isinstance(inserted_split, Split) - assert inserted_split.name == 'some_name' + assert inserted_split.name == self.payload['ff']['d'][0]['name'] + + inserted_rbs = rbs_storage.get(self.payload['rbs']['d'][0]['name']) + assert isinstance(inserted_rbs, RuleBasedSegment) + assert inserted_rbs.name == self.payload['rbs']['d'][0]['name'] os.remove("./splits.json") def test_json_elements_sanitization(self, mocker): """Test sanitization.""" - split_synchronizer = LocalSplitSynchronizer(mocker.Mock(), mocker.Mock(), mocker.Mock()) + split_synchronizer = LocalSplitSynchronizer(mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock()) # check no changes if all elements exist with valid values - parsed = {"splits": [], "since": -1, "till": -1} + parsed = {"ff": {"d": [], "s": -1, "t": -1}, "rbs": {"d": [], "s": -1, "t": -1}} assert (split_synchronizer._sanitize_json_elements(parsed) == parsed) # check set since to -1 when is None parsed2 = parsed.copy() - parsed2['since'] = None + parsed2['ff']['s'] = None assert (split_synchronizer._sanitize_json_elements(parsed2) == parsed) # check no changes if since > -1 parsed2 = parsed.copy() - parsed2['since'] = 12 + 
parsed2['ff']['s'] = 12 assert (split_synchronizer._sanitize_json_elements(parsed2) == parsed) # check set till to -1 when is None parsed2 = parsed.copy() - parsed2['till'] = None + parsed2['ff']['t'] = None assert (split_synchronizer._sanitize_json_elements(parsed2) == parsed) # check add since when missing - parsed2 = {"splits": [], "till": -1} + parsed2 = {"ff": {"d": [], "t": -1}, "rbs": {"d": [], "s": -1, "t": -1}} assert (split_synchronizer._sanitize_json_elements(parsed2) == parsed) # check add till when missing - parsed2 = {"splits": [], "since": -1} + parsed2 = {"ff": {"d": [], "s": -1}, "rbs": {"d": [], "s": -1, "t": -1}} assert (split_synchronizer._sanitize_json_elements(parsed2) == parsed) # check add splits when missing - parsed2 = {"since": -1, "till": -1} + parsed2 = {"ff": {"s": -1, "t": -1}, "rbs": {"d": [], "s": -1, "t": -1}} assert (split_synchronizer._sanitize_json_elements(parsed2) == parsed) - def test_split_elements_sanitization(self, mocker): + # check add since when missing + parsed2 = {"ff": {"d": [], "t": -1}, "rbs": {"d": [], "t": -1}} + assert (split_synchronizer._sanitize_json_elements(parsed2) == parsed) + + # check add till when missing + parsed2 = {"ff": {"d": [], "s": -1}, "rbs": {"d": [], "s": -1}} + assert (split_synchronizer._sanitize_json_elements(parsed2) == parsed) + + # check add splits when missing + parsed2 = {"ff": {"s": -1, "t": -1}, "rbs": {"s": -1, "t": -1}} + assert (split_synchronizer._sanitize_json_elements(parsed2) == parsed) + + def test_elements_sanitization(self, mocker): """Test sanitization.""" - split_synchronizer = LocalSplitSynchronizer(mocker.Mock(), mocker.Mock(), mocker.Mock()) + split_synchronizer = LocalSplitSynchronizer(mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock()) # No changes when split structure is good assert (split_synchronizer._sanitize_feature_flag_elements(splits_json["splitChange1_1"]["splits"]) == splits_json["splitChange1_1"]["splits"]) @@ -1183,7 +1164,21 @@ def 
test_split_elements_sanitization(self, mocker): split[0]['algo'] = 1 assert (split_synchronizer._sanitize_feature_flag_elements(split)[0]['algo'] == 2) - def test_split_condition_sanitization(self, mocker): + # test 'status' is set to ACTIVE when None + rbs = copy.deepcopy(json_body["rbs"]["d"]) + rbs[0]['status'] = None + assert (split_synchronizer._sanitize_rb_segment_elements(rbs)[0]['status'] == 'ACTIVE') + + # test 'changeNumber' is set to 0 when invalid + rbs = copy.deepcopy(json_body["rbs"]["d"]) + rbs[0]['changeNumber'] = -2 + assert (split_synchronizer._sanitize_rb_segment_elements(rbs)[0]['changeNumber'] == 0) + + rbs = copy.deepcopy(json_body["rbs"]["d"]) + del rbs[0]['conditions'] + assert (len(split_synchronizer._sanitize_rb_segment_elements(rbs)[0]['conditions']) == 1) + + def test_condition_sanitization(self, mocker): """Test sanitization.""" split_synchronizer = LocalSplitSynchronizer(mocker.Mock(), mocker.Mock(), mocker.Mock()) @@ -1218,13 +1213,14 @@ def test_split_condition_sanitization(self, mocker): class LocalSplitsSynchronizerAsyncTests(object): """Split synchronizer test cases.""" - splits = copy.deepcopy(splits_raw) + payload = copy.deepcopy(json_body) @pytest.mark.asyncio async def test_synchronize_splits_error(self, mocker): """Test that if fetching splits fails at some_point, the task will continue running.""" storage = mocker.Mock(spec=SplitStorage) - split_synchronizer = LocalSplitSynchronizerAsync("/incorrect_file", storage) + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) + split_synchronizer = LocalSplitSynchronizerAsync("/incorrect_file", storage, rbs_storage) with pytest.raises(Exception): await split_synchronizer.synchronize_splits(1) @@ -1233,75 +1229,76 @@ async def test_synchronize_splits_error(self, mocker): async def test_synchronize_splits(self, mocker): """Test split sync.""" storage = InMemorySplitStorageAsync() + rbs_storage = InMemoryRuleBasedSegmentStorageAsync() - till = 123 async def 
read_splits_from_json_file(*args, **kwargs): - return self.splits, till + return self.payload - split_synchronizer = LocalSplitSynchronizerAsync("split.json", storage, LocalhostMode.JSON) + split_synchronizer = LocalSplitSynchronizerAsync("split.json", storage, rbs_storage, LocalhostMode.JSON) split_synchronizer._read_feature_flags_from_json_file = read_splits_from_json_file await split_synchronizer.synchronize_splits() - inserted_split = await storage.get(self.splits[0]['name']) + inserted_split = await storage.get(self.payload["ff"]["d"][0]['name']) assert isinstance(inserted_split, Split) assert inserted_split.name == 'some_name' # Should sync when changenumber is not changed - self.splits[0]['killed'] = True + self.payload["ff"]["d"][0]['killed'] = True await split_synchronizer.synchronize_splits() - inserted_split = await storage.get(self.splits[0]['name']) + inserted_split = await storage.get(self.payload["ff"]["d"][0]['name']) assert inserted_split.killed # Should not sync when changenumber is less than stored - till = 122 - self.splits[0]['killed'] = False + self.payload["ff"]["t"] = 122 + self.payload["ff"]["d"][0]['killed'] = False await split_synchronizer.synchronize_splits() - inserted_split = await storage.get(self.splits[0]['name']) + inserted_split = await storage.get(self.payload["ff"]["d"][0]['name']) assert inserted_split.killed # Should sync when changenumber is higher than stored - till = 124 + self.payload["ff"]["t"] = 1675095324999 split_synchronizer._current_json_sha = "-1" await split_synchronizer.synchronize_splits() - inserted_split = await storage.get(self.splits[0]['name']) + inserted_split = await storage.get(self.payload["ff"]["d"][0]['name']) assert inserted_split.killed == False # Should sync when till is default (-1) - till = -1 + self.payload["ff"]["t"] = -1 split_synchronizer._current_json_sha = "-1" - self.splits[0]['killed'] = True + self.payload["ff"]["d"][0]['killed'] = True await split_synchronizer.synchronize_splits() - 
inserted_split = await storage.get(self.splits[0]['name']) + inserted_split = await storage.get(self.payload["ff"]["d"][0]['name']) assert inserted_split.killed == True @pytest.mark.asyncio async def test_sync_flag_sets_with_config_sets(self, mocker): """Test split sync with flag sets.""" storage = InMemorySplitStorageAsync(['set1', 'set2']) - - split = self.splits[0].copy() + rbs_storage = InMemoryRuleBasedSegmentStorageAsync() + + split = self.payload["ff"]["d"][0].copy() split['name'] = 'second' - splits1 = [self.splits[0].copy(), split] - splits2 = self.splits.copy() - splits3 = self.splits.copy() - splits4 = self.splits.copy() + splits1 = [self.payload["ff"]["d"][0].copy(), split] + splits2 = self.payload["ff"]["d"].copy() + splits3 = self.payload["ff"]["d"].copy() + splits4 = self.payload["ff"]["d"].copy() self.called = 0 async def read_feature_flags_from_json_file(*args, **kwargs): self.called += 1 if self.called == 1: - return splits1, 123 + return {"ff": {"d": splits1, "t": 123, "s": -1}, "rbs": {"d": [], "t": -1, "s": -1}} elif self.called == 2: splits2[0]['sets'] = ['set3'] - return splits2, 124 + return {"ff": {"d": splits2, "t": 124, "s": -1}, "rbs": {"d": [], "t": -1, "s": -1}} elif self.called == 3: splits3[0]['sets'] = ['set1'] - return splits3, 12434 + return {"ff": {"d": splits3, "t": 12434, "s": -1}, "rbs": {"d": [], "t": -1, "s": -1}} splits4[0]['sets'] = ['set6'] splits4[0]['name'] = 'new_split' - return splits4, 12438 + return {"ff": {"d": splits4, "t": 12438, "s": -1}, "rbs": {"d": [], "t": -1, "s": -1}} - split_synchronizer = LocalSplitSynchronizerAsync("split.json", storage, LocalhostMode.JSON) + split_synchronizer = LocalSplitSynchronizerAsync("split.json", storage, rbs_storage, LocalhostMode.JSON) split_synchronizer._read_feature_flags_from_json_file = read_feature_flags_from_json_file await split_synchronizer.synchronize_splits() @@ -1320,30 +1317,30 @@ async def read_feature_flags_from_json_file(*args, **kwargs): async def 
test_sync_flag_sets_without_config_sets(self, mocker): """Test split sync with flag sets.""" storage = InMemorySplitStorageAsync() - - split = self.splits[0].copy() + rbs_storage = InMemoryRuleBasedSegmentStorageAsync() + + split = self.payload["ff"]["d"][0].copy() split['name'] = 'second' - splits1 = [self.splits[0].copy(), split] - splits2 = self.splits.copy() - splits3 = self.splits.copy() - splits4 = self.splits.copy() + splits1 = [self.payload["ff"]["d"][0].copy(), split] + splits2 = self.payload["ff"]["d"].copy() + splits3 = self.payload["ff"]["d"].copy() + splits4 = self.payload["ff"]["d"].copy() self.called = 0 async def read_feature_flags_from_json_file(*args, **kwargs): self.called += 1 if self.called == 1: - return splits1, 123 + return {"ff": {"d": splits1, "t": 123, "s": -1}, "rbs": {"d": [], "t": -1, "s": -1}} elif self.called == 2: - splits2[0]['sets'] = ['set3'] - return splits2, 124 + return {"ff": {"d": splits2, "t": 124, "s": -1}, "rbs": {"d": [], "t": -1, "s": -1}} elif self.called == 3: splits3[0]['sets'] = ['set1'] - return splits3, 12434 + return {"ff": {"d": splits3, "t": 12434, "s": -1}, "rbs": {"d": [], "t": -1, "s": -1}} splits4[0]['sets'] = ['set6'] splits4[0]['name'] = 'third_split' - return splits4, 12438 + return {"ff": {"d": splits4, "t": 12438, "s": -1}, "rbs": {"d": [], "t": -1, "s": -1}} - split_synchronizer = LocalSplitSynchronizerAsync("split.json", storage, LocalhostMode.JSON) + split_synchronizer = LocalSplitSynchronizerAsync("split.json", storage, rbs_storage, LocalhostMode.JSON) split_synchronizer._read_feature_flags_from_json_file = read_feature_flags_from_json_file await split_synchronizer.synchronize_splits() @@ -1362,13 +1359,18 @@ async def read_feature_flags_from_json_file(*args, **kwargs): async def test_reading_json(self, mocker): """Test reading json file.""" async with aiofiles.open("./splits.json", "w") as f: - await f.write(json.dumps(json_body)) + await f.write(json.dumps(self.payload)) storage = 
InMemorySplitStorageAsync() - split_synchronizer = LocalSplitSynchronizerAsync("./splits.json", storage, LocalhostMode.JSON) + rbs_storage = InMemoryRuleBasedSegmentStorageAsync() + split_synchronizer = LocalSplitSynchronizerAsync("./splits.json", storage, rbs_storage, LocalhostMode.JSON) await split_synchronizer.synchronize_splits() - inserted_split = await storage.get(json_body['splits'][0]['name']) + inserted_split = await storage.get(self.payload['ff']['d'][0]['name']) assert isinstance(inserted_split, Split) - assert inserted_split.name == 'some_name' + assert inserted_split.name == self.payload['ff']['d'][0]['name'] + + inserted_rbs = await rbs_storage.get(self.payload['rbs']['d'][0]['name']) + assert isinstance(inserted_rbs, RuleBasedSegment) + assert inserted_rbs.name == self.payload['rbs']['d'][0]['name'] os.remove("./splits.json") diff --git a/tests/sync/test_synchronizer.py b/tests/sync/test_synchronizer.py index b2ef9fa0..1e89af66 100644 --- a/tests/sync/test_synchronizer.py +++ b/tests/sync/test_synchronizer.py @@ -671,7 +671,6 @@ def test_start_periodic_data_recording(self, mocker): assert len(impression_count_task.start.mock_calls) == 1 assert len(event_task.start.mock_calls) == 1 - class RedisSynchronizerTests(object): def test_start_periodic_data_recording(self, mocker): impression_count_task = mocker.Mock(spec=ImpressionsCountSyncTask) @@ -744,7 +743,6 @@ def stop_mock(event): assert len(unique_keys_task.stop.mock_calls) == 1 assert len(clear_filter_task.stop.mock_calls) == 1 - class RedisSynchronizerAsyncTests(object): @pytest.mark.asyncio async def test_start_periodic_data_recording(self, mocker): From 2cbc6474bc6ec5f41615f9f3f0df3ae50c4603cb Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany <41021307+chillaq@users.noreply.github.com> Date: Fri, 14 Mar 2025 11:47:13 -0300 Subject: [PATCH 15/56] Update splitio/storage/pluggable.py Co-authored-by: Emiliano Sanchez --- splitio/storage/pluggable.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) 
diff --git a/splitio/storage/pluggable.py b/splitio/storage/pluggable.py index 66fad1e5..1ac12bd2 100644 --- a/splitio/storage/pluggable.py +++ b/splitio/storage/pluggable.py @@ -15,7 +15,7 @@ _LOGGER = logging.getLogger(__name__) class PluggableRuleBasedSegmentsStorageBase(RuleBasedSegmentsStorage): - """RedPluggable storage for rule based segments.""" + """Pluggable storage for rule based segments.""" _TILL_LENGTH = 4 From d0b2c6729f9f0f18a271da2d9472d6a3652bc638 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany <41021307+chillaq@users.noreply.github.com> Date: Fri, 14 Mar 2025 11:47:20 -0300 Subject: [PATCH 16/56] Update splitio/storage/pluggable.py Co-authored-by: Emiliano Sanchez --- splitio/storage/pluggable.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/splitio/storage/pluggable.py b/splitio/storage/pluggable.py index 1ac12bd2..20d4d437 100644 --- a/splitio/storage/pluggable.py +++ b/splitio/storage/pluggable.py @@ -98,7 +98,7 @@ def get_large_segment_names(self): pass class PluggableRuleBasedSegmentsStorage(PluggableRuleBasedSegmentsStorageBase): - """RedPluggable storage for rule based segments.""" + """Pluggable storage for rule based segments.""" def __init__(self, pluggable_adapter, prefix=None): """ From cc990a9bedcb7de033ac8f07b8ee416023a5bebe Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany <41021307+chillaq@users.noreply.github.com> Date: Fri, 14 Mar 2025 11:47:27 -0300 Subject: [PATCH 17/56] Update splitio/storage/pluggable.py Co-authored-by: Emiliano Sanchez --- splitio/storage/pluggable.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/splitio/storage/pluggable.py b/splitio/storage/pluggable.py index 20d4d437..c27a92fd 100644 --- a/splitio/storage/pluggable.py +++ b/splitio/storage/pluggable.py @@ -179,7 +179,7 @@ def get_segment_names(self): return None class PluggableRuleBasedSegmentsStorageAsync(PluggableRuleBasedSegmentsStorageBase): - """RedPluggable storage for rule based segments.""" + """Pluggable 
storage for rule based segments.""" def __init__(self, pluggable_adapter, prefix=None): """ From 4f7d8dc2b2d9e319cc934ad5181d71bdfa1375c3 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Tue, 18 Mar 2025 19:25:12 -0700 Subject: [PATCH 18/56] Updated tests --- splitio/client/factory.py | 23 +- splitio/sync/split.py | 2 +- tests/api/test_auth.py | 4 +- tests/api/test_splits_api.py | 12 +- tests/client/test_client.py | 155 +++- tests/client/test_input_validator.py | 22 +- tests/client/test_localhost.py | 14 +- tests/client/test_manager.py | 8 +- tests/integration/__init__.py | 70 +- tests/integration/files/splitChanges.json | 114 ++- tests/integration/files/split_changes.json | 8 +- .../integration/files/split_changes_temp.json | 2 +- tests/integration/test_client_e2e.py | 302 +++++-- .../integration/test_pluggable_integration.py | 32 +- tests/integration/test_redis_integration.py | 16 +- tests/integration/test_streaming_e2e.py | 766 ++++++++++-------- tests/models/grammar/test_matchers.py | 8 +- tests/push/test_parser.py | 4 +- tests/storage/test_pluggable.py | 56 +- tests/sync/test_manager.py | 8 +- tests/sync/test_segments_synchronizer.py | 44 +- tests/sync/test_splits_synchronizer.py | 66 +- tests/sync/test_synchronizer.py | 80 +- tests/sync/test_telemetry.py | 4 +- tests/tasks/test_segment_sync.py | 8 +- tests/tasks/test_split_sync.py | 73 +- 26 files changed, 1163 insertions(+), 738 deletions(-) diff --git a/splitio/client/factory.py b/splitio/client/factory.py index bb402bb5..7c56819f 100644 --- a/splitio/client/factory.py +++ b/splitio/client/factory.py @@ -23,14 +23,17 @@ from splitio.storage.inmemmory import InMemorySplitStorage, InMemorySegmentStorage, \ InMemoryImpressionStorage, InMemoryEventStorage, InMemoryTelemetryStorage, LocalhostTelemetryStorage, \ InMemorySplitStorageAsync, InMemorySegmentStorageAsync, InMemoryImpressionStorageAsync, \ - InMemoryEventStorageAsync, InMemoryTelemetryStorageAsync, LocalhostTelemetryStorageAsync + 
InMemoryEventStorageAsync, InMemoryTelemetryStorageAsync, LocalhostTelemetryStorageAsync, \ + InMemoryRuleBasedSegmentStorage, InMemoryRuleBasedSegmentStorageAsync from splitio.storage.adapters import redis from splitio.storage.redis import RedisSplitStorage, RedisSegmentStorage, RedisImpressionsStorage, \ RedisEventsStorage, RedisTelemetryStorage, RedisSplitStorageAsync, RedisEventsStorageAsync,\ - RedisSegmentStorageAsync, RedisImpressionsStorageAsync, RedisTelemetryStorageAsync + RedisSegmentStorageAsync, RedisImpressionsStorageAsync, RedisTelemetryStorageAsync, \ + RedisRuleBasedSegmentsStorage, RedisRuleBasedSegmentsStorageAsync from splitio.storage.pluggable import PluggableEventsStorage, PluggableImpressionsStorage, PluggableSegmentStorage, \ PluggableSplitStorage, PluggableTelemetryStorage, PluggableTelemetryStorageAsync, PluggableEventsStorageAsync, \ - PluggableImpressionsStorageAsync, PluggableSegmentStorageAsync, PluggableSplitStorageAsync + PluggableImpressionsStorageAsync, PluggableSegmentStorageAsync, PluggableSplitStorageAsync, \ + PluggableRuleBasedSegmentsStorage, PluggableRuleBasedSegmentsStorageAsync # APIs from splitio.api.client import HttpClient, HttpClientAsync, HttpClientKerberos @@ -543,6 +546,7 @@ def _build_in_memory_factory(api_key, cfg, sdk_url=None, events_url=None, # pyl storages = { 'splits': InMemorySplitStorage(cfg['flagSetsFilter'] if cfg['flagSetsFilter'] is not None else []), 'segments': InMemorySegmentStorage(), + 'rule_based_segments': InMemoryRuleBasedSegmentStorage(), 'impressions': InMemoryImpressionStorage(cfg['impressionsQueueSize'], telemetry_runtime_producer), 'events': InMemoryEventStorage(cfg['eventsQueueSize'], telemetry_runtime_producer), } @@ -559,7 +563,7 @@ def _build_in_memory_factory(api_key, cfg, sdk_url=None, events_url=None, # pyl imp_strategy, none_strategy, telemetry_runtime_producer) synchronizers = SplitSynchronizers( - SplitSynchronizer(apis['splits'], storages['splits']), + 
SplitSynchronizer(apis['splits'], storages['splits'], storages['rule_based_segments']), SegmentSynchronizer(apis['segments'], storages['splits'], storages['segments']), ImpressionSynchronizer(apis['impressions'], storages['impressions'], cfg['impressionsBulkSize']), @@ -671,6 +675,7 @@ async def _build_in_memory_factory_async(api_key, cfg, sdk_url=None, events_url= storages = { 'splits': InMemorySplitStorageAsync(cfg['flagSetsFilter'] if cfg['flagSetsFilter'] is not None else []), 'segments': InMemorySegmentStorageAsync(), + 'rule_based_segments': InMemoryRuleBasedSegmentStorageAsync(), 'impressions': InMemoryImpressionStorageAsync(cfg['impressionsQueueSize'], telemetry_runtime_producer), 'events': InMemoryEventStorageAsync(cfg['eventsQueueSize'], telemetry_runtime_producer), } @@ -687,7 +692,7 @@ async def _build_in_memory_factory_async(api_key, cfg, sdk_url=None, events_url= imp_strategy, none_strategy, telemetry_runtime_producer) synchronizers = SplitSynchronizers( - SplitSynchronizerAsync(apis['splits'], storages['splits']), + SplitSynchronizerAsync(apis['splits'], storages['splits'], storages['rule_based_segments']), SegmentSynchronizerAsync(apis['segments'], storages['splits'], storages['segments']), ImpressionSynchronizerAsync(apis['impressions'], storages['impressions'], cfg['impressionsBulkSize']), @@ -756,6 +761,7 @@ def _build_redis_factory(api_key, cfg): storages = { 'splits': RedisSplitStorage(redis_adapter, cache_enabled, cache_ttl, []), 'segments': RedisSegmentStorage(redis_adapter), + 'rule_based_segments': RedisRuleBasedSegmentsStorage(redis_adapter), 'impressions': RedisImpressionsStorage(redis_adapter, sdk_metadata), 'events': RedisEventsStorage(redis_adapter, sdk_metadata), 'telemetry': RedisTelemetryStorage(redis_adapter, sdk_metadata) @@ -839,6 +845,7 @@ async def _build_redis_factory_async(api_key, cfg): storages = { 'splits': RedisSplitStorageAsync(redis_adapter, cache_enabled, cache_ttl), 'segments': RedisSegmentStorageAsync(redis_adapter), 
+ 'rule_based_segments': RedisRuleBasedSegmentsStorageAsync(redis_adapter), 'impressions': RedisImpressionsStorageAsync(redis_adapter, sdk_metadata), 'events': RedisEventsStorageAsync(redis_adapter, sdk_metadata), 'telemetry': await RedisTelemetryStorageAsync.create(redis_adapter, sdk_metadata) @@ -922,6 +929,7 @@ def _build_pluggable_factory(api_key, cfg): storages = { 'splits': PluggableSplitStorage(pluggable_adapter, storage_prefix, []), 'segments': PluggableSegmentStorage(pluggable_adapter, storage_prefix), + 'rule_based_segments': PluggableRuleBasedSegmentsStorage(pluggable_adapter, storage_prefix), 'impressions': PluggableImpressionsStorage(pluggable_adapter, sdk_metadata, storage_prefix), 'events': PluggableEventsStorage(pluggable_adapter, sdk_metadata, storage_prefix), 'telemetry': PluggableTelemetryStorage(pluggable_adapter, sdk_metadata, storage_prefix) @@ -1003,6 +1011,7 @@ async def _build_pluggable_factory_async(api_key, cfg): storages = { 'splits': PluggableSplitStorageAsync(pluggable_adapter, storage_prefix), 'segments': PluggableSegmentStorageAsync(pluggable_adapter, storage_prefix), + 'rule_based_segments': PluggableRuleBasedSegmentsStorageAsync(pluggable_adapter, storage_prefix), 'impressions': PluggableImpressionsStorageAsync(pluggable_adapter, sdk_metadata, storage_prefix), 'events': PluggableEventsStorageAsync(pluggable_adapter, sdk_metadata, storage_prefix), 'telemetry': await PluggableTelemetryStorageAsync.create(pluggable_adapter, sdk_metadata, storage_prefix) @@ -1081,6 +1090,7 @@ def _build_localhost_factory(cfg): storages = { 'splits': InMemorySplitStorage(cfg['flagSetsFilter'] if cfg['flagSetsFilter'] is not None else []), 'segments': InMemorySegmentStorage(), # not used, just to avoid possible future errors. 
+ 'rule_based_segments': InMemoryRuleBasedSegmentStorage(), 'impressions': LocalhostImpressionsStorage(), 'events': LocalhostEventsStorage(), } @@ -1088,6 +1098,7 @@ def _build_localhost_factory(cfg): synchronizers = SplitSynchronizers( LocalSplitSynchronizer(cfg['splitFile'], storages['splits'], + storages['rule_based_segments'], localhost_mode), LocalSegmentSynchronizer(cfg['segmentDirectory'], storages['splits'], storages['segments']), None, None, None, @@ -1151,6 +1162,7 @@ async def _build_localhost_factory_async(cfg): storages = { 'splits': InMemorySplitStorageAsync(), 'segments': InMemorySegmentStorageAsync(), # not used, just to avoid possible future errors. + 'rule_based_segments': InMemoryRuleBasedSegmentStorageAsync(), 'impressions': LocalhostImpressionsStorageAsync(), 'events': LocalhostEventsStorageAsync(), } @@ -1158,6 +1170,7 @@ async def _build_localhost_factory_async(cfg): synchronizers = SplitSynchronizers( LocalSplitSynchronizerAsync(cfg['splitFile'], storages['splits'], + storages['rule_based_segments'], localhost_mode), LocalSegmentSynchronizerAsync(cfg['segmentDirectory'], storages['splits'], storages['segments']), None, None, None, diff --git a/splitio/sync/split.py b/splitio/sync/split.py index 58ea900a..fa7562d0 100644 --- a/splitio/sync/split.py +++ b/splitio/sync/split.py @@ -597,7 +597,7 @@ def _sanitize_condition(self, feature_flag): { "treatment": "off", "size": 100 } ], "label": "default rule" - }) + }) return feature_flag diff --git a/tests/api/test_auth.py b/tests/api/test_auth.py index a842bd36..175977a2 100644 --- a/tests/api/test_auth.py +++ b/tests/api/test_auth.py @@ -34,7 +34,7 @@ def test_auth(self, mocker): call_made = httpclient.get.mock_calls[0] # validate positional arguments - assert call_made[1] == ('auth', 'v2/auth?s=1.1', 'some_api_key') + assert call_made[1] == ('auth', 'v2/auth?s=1.3', 'some_api_key') # validate key-value args (headers) assert call_made[2]['extra_headers'] == { @@ -89,7 +89,7 @@ async def get(verb, 
url, key, extra_headers): # validate positional arguments assert self.verb == 'auth' - assert self.url == 'v2/auth?s=1.1' + assert self.url == 'v2/auth?s=1.3' assert self.key == 'some_api_key' assert self.headers == { 'SplitSDKVersion': 'python-%s' % __version__, diff --git a/tests/api/test_splits_api.py b/tests/api/test_splits_api.py index 1826ec23..af9819ea 100644 --- a/tests/api/test_splits_api.py +++ b/tests/api/test_splits_api.py @@ -24,7 +24,7 @@ def test_fetch_split_changes(self, mocker): 'SplitSDKMachineIP': '1.2.3.4', 'SplitSDKMachineName': 'some' }, - query={'s': '1.1', 'since': 123, 'rbSince': -1, 'sets': 'set1,set2'})] + query={'s': '1.3', 'since': 123, 'rbSince': -1, 'sets': 'set1,set2'})] httpclient.reset_mock() response = split_api.fetch_splits(123, 1, FetchOptions(True, 123, None,'set3')) @@ -36,7 +36,7 @@ def test_fetch_split_changes(self, mocker): 'SplitSDKMachineName': 'some', 'Cache-Control': 'no-cache' }, - query={'s': '1.1', 'since': 123, 'rbSince': 1, 'till': 123, 'sets': 'set3'})] + query={'s': '1.3', 'since': 123, 'rbSince': 1, 'till': 123, 'sets': 'set3'})] httpclient.reset_mock() response = split_api.fetch_splits(123, 122, FetchOptions(True, 123, None, 'set3')) @@ -48,7 +48,7 @@ def test_fetch_split_changes(self, mocker): 'SplitSDKMachineName': 'some', 'Cache-Control': 'no-cache' }, - query={'s': '1.1', 'since': 123, 'rbSince': 122, 'till': 123, 'sets': 'set3'})] + query={'s': '1.3', 'since': 123, 'rbSince': 122, 'till': 123, 'sets': 'set3'})] httpclient.reset_mock() def raise_exception(*args, **kwargs): @@ -92,7 +92,7 @@ async def get(verb, url, key, query, extra_headers): 'SplitSDKMachineIP': '1.2.3.4', 'SplitSDKMachineName': 'some' } - assert self.query == {'s': '1.1', 'since': 123, 'rbSince': -1, 'sets': 'set1,set2'} + assert self.query == {'s': '1.3', 'since': 123, 'rbSince': -1, 'sets': 'set1,set2'} httpclient.reset_mock() response = await split_api.fetch_splits(123, 1, FetchOptions(True, 123, None, 'set3')) @@ -106,7 +106,7 @@ 
async def get(verb, url, key, query, extra_headers): 'SplitSDKMachineName': 'some', 'Cache-Control': 'no-cache' } - assert self.query == {'s': '1.1', 'since': 123, 'rbSince': 1, 'till': 123, 'sets': 'set3'} + assert self.query == {'s': '1.3', 'since': 123, 'rbSince': 1, 'till': 123, 'sets': 'set3'} httpclient.reset_mock() response = await split_api.fetch_splits(123, 122, FetchOptions(True, 123, None)) @@ -120,7 +120,7 @@ async def get(verb, url, key, query, extra_headers): 'SplitSDKMachineName': 'some', 'Cache-Control': 'no-cache' } - assert self.query == {'s': '1.1', 'since': 123, 'rbSince': 122, 'till': 123} + assert self.query == {'s': '1.3', 'since': 123, 'rbSince': 122, 'till': 123} httpclient.reset_mock() def raise_exception(*args, **kwargs): diff --git a/tests/client/test_client.py b/tests/client/test_client.py index 48a0fba2..526b7347 100644 --- a/tests/client/test_client.py +++ b/tests/client/test_client.py @@ -11,10 +11,11 @@ from splitio.client.factory import SplitFactory, Status as FactoryStatus, SplitFactoryAsync from splitio.models.impressions import Impression, Label from splitio.models.events import Event, EventWrapper -from splitio.storage import EventStorage, ImpressionStorage, SegmentStorage, SplitStorage +from splitio.storage import EventStorage, ImpressionStorage, SegmentStorage, SplitStorage, RuleBasedSegmentsStorage from splitio.storage.inmemmory import InMemorySplitStorage, InMemorySegmentStorage, \ InMemoryImpressionStorage, InMemoryTelemetryStorage, InMemorySplitStorageAsync, \ - InMemoryImpressionStorageAsync, InMemorySegmentStorageAsync, InMemoryTelemetryStorageAsync, InMemoryEventStorageAsync + InMemoryImpressionStorageAsync, InMemorySegmentStorageAsync, InMemoryTelemetryStorageAsync, InMemoryEventStorageAsync, \ + InMemoryRuleBasedSegmentStorage, InMemoryRuleBasedSegmentStorageAsync from splitio.models.splits import Split, Status, from_raw from splitio.engine.impressions.impressions import Manager as ImpressionManager from 
splitio.engine.impressions.manager import Counter as ImpressionsCounter @@ -35,6 +36,7 @@ def test_get_treatment(self, mocker): telemetry_producer = TelemetryStorageProducer(telemetry_storage) split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() + rb_segment_storage = InMemoryRuleBasedSegmentStorage() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) @@ -55,6 +57,7 @@ def synchronize_config(*_): factory = SplitFactory(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -70,7 +73,7 @@ def synchronize_config(*_): type(factory).ready = ready_property factory.block_until_ready(5) - split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0])], [], -1) + split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0])], [], -1) client = Client(factory, recorder, True) client._evaluator = mocker.Mock(spec=Evaluator) client._evaluator.eval_with_context.return_value = { @@ -110,6 +113,7 @@ def test_get_treatment_with_config(self, mocker): telemetry_producer = TelemetryStorageProducer(telemetry_storage) split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() + rb_segment_storage = InMemoryRuleBasedSegmentStorage() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) @@ -123,6 +127,7 @@ def test_get_treatment_with_config(self, mocker): {'splits': split_storage, 'segments': segment_storage, 'impressions': impression_storage, + 'rule_based_segments': rb_segment_storage, 'events': event_storage}, 
mocker.Mock(), recorder, @@ -140,7 +145,7 @@ def synchronize_config(*_): mocker.patch('splitio.client.client.utctime_ms', new=lambda: 1000) mocker.patch('splitio.client.client.get_latency_bucket_index', new=lambda x: 5) - split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0])], [], -1) + split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0])], [], -1) client = Client(factory, recorder, True) client._evaluator = mocker.Mock(spec=Evaluator) client._evaluator.eval_with_context.return_value = { @@ -185,11 +190,12 @@ def test_get_treatments(self, mocker): telemetry_producer = TelemetryStorageProducer(telemetry_storage) split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() + rb_segment_storage = InMemoryRuleBasedSegmentStorage() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) - split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) + split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0]), from_raw(splits_json['splitChange1_1']['ff']['d'][1])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False @@ -198,6 +204,7 @@ def test_get_treatments(self, mocker): factory = SplitFactory(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -263,11 +270,12 @@ def test_get_treatments_by_flag_set(self, mocker): telemetry_producer = TelemetryStorageProducer(telemetry_storage) split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() + rb_segment_storage = 
InMemoryRuleBasedSegmentStorage() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) - split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) + split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0]), from_raw(splits_json['splitChange1_1']['ff']['d'][1])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False @@ -276,6 +284,7 @@ def test_get_treatments_by_flag_set(self, mocker): factory = SplitFactory(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -340,11 +349,12 @@ def test_get_treatments_by_flag_sets(self, mocker): telemetry_producer = TelemetryStorageProducer(telemetry_storage) split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() + rb_segment_storage = InMemoryRuleBasedSegmentStorage() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) - split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) + split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0]), from_raw(splits_json['splitChange1_1']['ff']['d'][1])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False @@ -353,6 +363,7 @@ def 
test_get_treatments_by_flag_sets(self, mocker): factory = SplitFactory(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -417,11 +428,12 @@ def test_get_treatments_with_config(self, mocker): telemetry_producer = TelemetryStorageProducer(telemetry_storage) split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() + rb_segment_storage = InMemoryRuleBasedSegmentStorage() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) - split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) + split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0]), from_raw(splits_json['splitChange1_1']['ff']['d'][1])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False @@ -429,6 +441,7 @@ def test_get_treatments_with_config(self, mocker): factory = SplitFactory(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -498,11 +511,12 @@ def test_get_treatments_with_config_by_flag_set(self, mocker): telemetry_producer = TelemetryStorageProducer(telemetry_storage) split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() + rb_segment_storage = InMemoryRuleBasedSegmentStorage() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) impmanager = ImpressionManager(StrategyDebugMode(), 
StrategyNoneMode(), telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) - split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) + split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0]), from_raw(splits_json['splitChange1_1']['ff']['d'][1])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False @@ -510,6 +524,7 @@ def test_get_treatments_with_config_by_flag_set(self, mocker): factory = SplitFactory(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -576,11 +591,12 @@ def test_get_treatments_with_config_by_flag_sets(self, mocker): telemetry_producer = TelemetryStorageProducer(telemetry_storage) split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() + rb_segment_storage = InMemoryRuleBasedSegmentStorage() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) - split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) + split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0]), from_raw(splits_json['splitChange1_1']['ff']['d'][1])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False @@ -588,6 +604,7 @@ def test_get_treatments_with_config_by_flag_sets(self, mocker): factory = SplitFactory(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': 
event_storage}, mocker.Mock(), @@ -654,6 +671,7 @@ def test_impression_toggle_optimized(self, mocker): telemetry_producer = TelemetryStorageProducer(telemetry_storage) split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() + rb_segment_storage = InMemoryRuleBasedSegmentStorage() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) @@ -673,6 +691,7 @@ def synchronize_config(*_): factory = SplitFactory(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -687,9 +706,9 @@ def synchronize_config(*_): factory.block_until_ready(5) split_storage.update([ - from_raw(splits_json['splitChange1_1']['splits'][0]), - from_raw(splits_json['splitChange1_1']['splits'][1]), - from_raw(splits_json['splitChange1_1']['splits'][2]) + from_raw(splits_json['splitChange1_1']['ff']['d'][0]), + from_raw(splits_json['splitChange1_1']['ff']['d'][1]), + from_raw(splits_json['splitChange1_1']['ff']['d'][2]) ], [], -1) client = Client(factory, recorder, True) assert client.get_treatment('some_key', 'SPLIT_1') == 'off' @@ -716,6 +735,7 @@ def test_impression_toggle_debug(self, mocker): telemetry_producer = TelemetryStorageProducer(telemetry_storage) split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() + rb_segment_storage = InMemoryRuleBasedSegmentStorage() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) @@ -735,6 +755,7 @@ def synchronize_config(*_): factory = SplitFactory(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 
'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -749,9 +770,9 @@ def synchronize_config(*_): factory.block_until_ready(5) split_storage.update([ - from_raw(splits_json['splitChange1_1']['splits'][0]), - from_raw(splits_json['splitChange1_1']['splits'][1]), - from_raw(splits_json['splitChange1_1']['splits'][2]) + from_raw(splits_json['splitChange1_1']['ff']['d'][0]), + from_raw(splits_json['splitChange1_1']['ff']['d'][1]), + from_raw(splits_json['splitChange1_1']['ff']['d'][2]) ], [], -1) client = Client(factory, recorder, True) assert client.get_treatment('some_key', 'SPLIT_1') == 'off' @@ -778,6 +799,7 @@ def test_impression_toggle_none(self, mocker): telemetry_producer = TelemetryStorageProducer(telemetry_storage) split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() + rb_segment_storage = InMemoryRuleBasedSegmentStorage() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorage(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) @@ -797,6 +819,7 @@ def synchronize_config(*_): factory = SplitFactory(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -811,9 +834,9 @@ def synchronize_config(*_): factory.block_until_ready(5) split_storage.update([ - from_raw(splits_json['splitChange1_1']['splits'][0]), - from_raw(splits_json['splitChange1_1']['splits'][1]), - from_raw(splits_json['splitChange1_1']['splits'][2]) + from_raw(splits_json['splitChange1_1']['ff']['d'][0]), + from_raw(splits_json['splitChange1_1']['ff']['d'][1]), + from_raw(splits_json['splitChange1_1']['ff']['d'][2]) ], [], -1) client = Client(factory, recorder, True) assert client.get_treatment('some_key', 'SPLIT_1') == 'off' @@ -829,6 +852,7 @@ def test_destroy(self, mocker): """Test that destroy/destroyed 
calls are forwarded to the factory.""" split_storage = mocker.Mock(spec=SplitStorage) segment_storage = mocker.Mock(spec=SegmentStorage) + rb_segment_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) impression_storage = mocker.Mock(spec=ImpressionStorage) event_storage = mocker.Mock(spec=EventStorage) @@ -839,6 +863,7 @@ def test_destroy(self, mocker): factory = SplitFactory(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -863,6 +888,7 @@ def test_track(self, mocker): """Test that destroy/destroyed calls are forwarded to the factory.""" split_storage = mocker.Mock(spec=SplitStorage) segment_storage = mocker.Mock(spec=SegmentStorage) + rb_segment_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) impression_storage = mocker.Mock(spec=ImpressionStorage) event_storage = mocker.Mock(spec=EventStorage) event_storage.put.return_value = True @@ -874,6 +900,7 @@ def test_track(self, mocker): factory = SplitFactory(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -912,7 +939,8 @@ def test_evaluations_before_running_post_fork(self, mocker): impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() - split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0])], [], -1) + rb_segment_storage = InMemoryRuleBasedSegmentStorage() + split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False @@ -921,6 +949,7 @@ def test_evaluations_before_running_post_fork(self, mocker): factory = SplitFactory(mocker.Mock(), {'splits': split_storage, 'segments': 
segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': mocker.Mock()}, mocker.Mock(), @@ -991,11 +1020,13 @@ def test_telemetry_not_ready(self, mocker): impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() - split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0])], [], -1) + rb_segment_storage = InMemoryRuleBasedSegmentStorage() + split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0])], [], -1) recorder = StandardRecorder(impmanager, mocker.Mock(), mocker.Mock(), telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) factory = SplitFactory('localhost', {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': mocker.Mock()}, mocker.Mock(), @@ -1021,8 +1052,9 @@ def synchronize_config(*_): def test_telemetry_record_treatment_exception(self, mocker): split_storage = InMemorySplitStorage() - split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0])], [], -1) + split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0])], [], -1) segment_storage = mocker.Mock(spec=SegmentStorage) + rb_segment_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) impression_storage = mocker.Mock(spec=ImpressionStorage) event_storage = mocker.Mock(spec=EventStorage) destroyed_property = mocker.PropertyMock() @@ -1038,6 +1070,7 @@ def test_telemetry_record_treatment_exception(self, mocker): factory = SplitFactory('localhost', {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -1125,7 +1158,8 @@ def test_telemetry_method_latency(self, mocker): impmanager = 
ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() - split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0])], [], -1) + rb_segment_storage = InMemoryRuleBasedSegmentStorage() + split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0])], [], -1) recorder = StandardRecorder(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False @@ -1136,6 +1170,7 @@ def test_telemetry_method_latency(self, mocker): factory = SplitFactory(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -1189,6 +1224,7 @@ def stop(*_): def test_telemetry_track_exception(self, mocker): split_storage = mocker.Mock(spec=SplitStorage) segment_storage = mocker.Mock(spec=SegmentStorage) + rb_segment_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) impression_storage = mocker.Mock(spec=ImpressionStorage) event_storage = mocker.Mock(spec=EventStorage) destroyed_property = mocker.PropertyMock() @@ -1204,6 +1240,7 @@ def test_telemetry_track_exception(self, mocker): factory = SplitFactory(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -1238,12 +1275,13 @@ async def test_get_treatment_async(self, mocker): telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() + rb_segment_storage = InMemoryRuleBasedSegmentStorageAsync() telemetry_runtime_producer = 
telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) - await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0])], [], -1) + await split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False @@ -1257,6 +1295,7 @@ async def synchronize_config(*_): factory = SplitFactoryAsync(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -1307,12 +1346,13 @@ async def test_get_treatment_with_config_async(self, mocker): telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() + rb_segment_storage = InMemoryRuleBasedSegmentStorageAsync() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) - await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0])], [], -1) + await 
split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False @@ -1320,6 +1360,7 @@ async def test_get_treatment_with_config_async(self, mocker): factory = SplitFactoryAsync(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -1382,12 +1423,13 @@ async def test_get_treatments_async(self, mocker): telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() + rb_segment_storage = InMemoryRuleBasedSegmentStorageAsync() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) - await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) + await split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0]), from_raw(splits_json['splitChange1_1']['ff']['d'][1])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False @@ -1395,6 +1437,7 @@ async def test_get_treatments_async(self, mocker): factory = SplitFactoryAsync(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -1460,12 +1503,13 @@ async def 
test_get_treatments_by_flag_set_async(self, mocker): telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() + rb_segment_storage = InMemoryRuleBasedSegmentStorageAsync() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) - await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) + await split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0]), from_raw(splits_json['splitChange1_1']['ff']['d'][1])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False @@ -1473,6 +1517,7 @@ async def test_get_treatments_by_flag_set_async(self, mocker): factory = SplitFactoryAsync(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -1538,12 +1583,13 @@ async def test_get_treatments_by_flag_sets_async(self, mocker): telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() + rb_segment_storage = InMemoryRuleBasedSegmentStorageAsync() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) impmanager = 
ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) - await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) + await split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0]), from_raw(splits_json['splitChange1_1']['ff']['d'][1])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False @@ -1551,6 +1597,7 @@ async def test_get_treatments_by_flag_sets_async(self, mocker): factory = SplitFactoryAsync(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -1616,18 +1663,20 @@ async def test_get_treatments_with_config(self, mocker): telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() + rb_segment_storage = InMemoryRuleBasedSegmentStorageAsync() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) - await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) + await split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0]), 
from_raw(splits_json['splitChange1_1']['ff']['d'][1])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False factory = SplitFactoryAsync(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -1698,18 +1747,20 @@ async def test_get_treatments_with_config_by_flag_set(self, mocker): telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() + rb_segment_storage = InMemoryRuleBasedSegmentStorageAsync() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) - await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) + await split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0]), from_raw(splits_json['splitChange1_1']['ff']['d'][1])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False factory = SplitFactoryAsync(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -1780,18 +1831,20 @@ async def test_get_treatments_with_config_by_flag_sets(self, mocker): telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) split_storage = InMemorySplitStorageAsync() 
segment_storage = InMemorySegmentStorageAsync() + rb_segment_storage = InMemoryRuleBasedSegmentStorageAsync() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) - await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0]), from_raw(splits_json['splitChange1_1']['splits'][1])], [], -1) + await split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0]), from_raw(splits_json['splitChange1_1']['ff']['d'][1])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False factory = SplitFactoryAsync(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -1862,6 +1915,7 @@ async def test_impression_toggle_optimized(self, mocker): telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() + rb_segment_storage = InMemoryRuleBasedSegmentStorageAsync() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) @@ -1877,6 +1931,7 @@ async def test_impression_toggle_optimized(self, mocker): factory = SplitFactoryAsync(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': 
event_storage}, mocker.Mock(), @@ -1890,9 +1945,9 @@ async def test_impression_toggle_optimized(self, mocker): await factory.block_until_ready(5) await split_storage.update([ - from_raw(splits_json['splitChange1_1']['splits'][0]), - from_raw(splits_json['splitChange1_1']['splits'][1]), - from_raw(splits_json['splitChange1_1']['splits'][2]) + from_raw(splits_json['splitChange1_1']['ff']['d'][0]), + from_raw(splits_json['splitChange1_1']['ff']['d'][1]), + from_raw(splits_json['splitChange1_1']['ff']['d'][2]) ], [], -1) client = ClientAsync(factory, recorder, True) treatment = await client.get_treatment('some_key', 'SPLIT_1') @@ -1923,6 +1978,7 @@ async def test_impression_toggle_debug(self, mocker): telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() + rb_segment_storage = InMemoryRuleBasedSegmentStorageAsync() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) @@ -1938,6 +1994,7 @@ async def test_impression_toggle_debug(self, mocker): factory = SplitFactoryAsync(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -1951,9 +2008,9 @@ async def test_impression_toggle_debug(self, mocker): await factory.block_until_ready(5) await split_storage.update([ - from_raw(splits_json['splitChange1_1']['splits'][0]), - from_raw(splits_json['splitChange1_1']['splits'][1]), - from_raw(splits_json['splitChange1_1']['splits'][2]) + from_raw(splits_json['splitChange1_1']['ff']['d'][0]), + from_raw(splits_json['splitChange1_1']['ff']['d'][1]), + from_raw(splits_json['splitChange1_1']['ff']['d'][2]) ], [], -1) client = ClientAsync(factory, recorder, True) assert await 
client.get_treatment('some_key', 'SPLIT_1') == 'off' @@ -1981,6 +2038,7 @@ async def test_impression_toggle_none(self, mocker): telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() + rb_segment_storage = InMemoryRuleBasedSegmentStorageAsync() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = mocker.Mock(spec=EventStorage) @@ -1996,6 +2054,7 @@ async def test_impression_toggle_none(self, mocker): factory = SplitFactoryAsync(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -2009,9 +2068,9 @@ async def test_impression_toggle_none(self, mocker): await factory.block_until_ready(5) await split_storage.update([ - from_raw(splits_json['splitChange1_1']['splits'][0]), - from_raw(splits_json['splitChange1_1']['splits'][1]), - from_raw(splits_json['splitChange1_1']['splits'][2]) + from_raw(splits_json['splitChange1_1']['ff']['d'][0]), + from_raw(splits_json['splitChange1_1']['ff']['d'][1]), + from_raw(splits_json['splitChange1_1']['ff']['d'][2]) ], [], -1) client = ClientAsync(factory, recorder, True) assert await client.get_treatment('some_key', 'SPLIT_1') == 'off' @@ -2027,6 +2086,7 @@ async def test_track_async(self, mocker): """Test that destroy/destroyed calls are forwarded to the factory.""" split_storage = InMemorySplitStorageAsync() segment_storage = mocker.Mock(spec=SegmentStorage) + rb_segment_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) impression_storage = mocker.Mock(spec=ImpressionStorage) event_storage = mocker.Mock(spec=EventStorage) self.events = [] @@ -2042,6 +2102,7 @@ async def put(event): factory = SplitFactoryAsync(mocker.Mock(), {'splits': split_storage, 'segments': 
segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -2076,15 +2137,17 @@ async def test_telemetry_not_ready_async(self, mocker): telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() + rb_segment_storage = InMemoryRuleBasedSegmentStorageAsync() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = InMemoryEventStorageAsync(10, telemetry_runtime_producer) impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) - await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0])], [], -1) + await split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0])], [], -1) factory = SplitFactoryAsync('localhost', {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': mocker.Mock()}, mocker.Mock(), @@ -2117,12 +2180,13 @@ async def test_telemetry_record_treatment_exception_async(self, mocker): telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() + rb_segment_storage = InMemoryRuleBasedSegmentStorageAsync() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = InMemoryEventStorageAsync(10, telemetry_runtime_producer) impmanager = ImpressionManager(StrategyDebugMode(), 
StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) - await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0])], [], -1) + await split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False @@ -2132,6 +2196,7 @@ async def test_telemetry_record_treatment_exception_async(self, mocker): factory = SplitFactoryAsync(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -2189,12 +2254,13 @@ async def test_telemetry_method_latency_async(self, mocker): telemetry_producer = TelemetryStorageProducerAsync(telemetry_storage) split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() + rb_segment_storage = InMemoryRuleBasedSegmentStorageAsync() telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() impression_storage = InMemoryImpressionStorageAsync(10, telemetry_runtime_producer) event_storage = InMemoryEventStorageAsync(10, telemetry_runtime_producer) impmanager = ImpressionManager(StrategyDebugMode(), StrategyNoneMode(), telemetry_runtime_producer) recorder = StandardRecorderAsync(impmanager, event_storage, impression_storage, telemetry_producer.get_telemetry_evaluation_producer(), telemetry_producer.get_telemetry_runtime_producer()) - await split_storage.update([from_raw(splits_json['splitChange1_1']['splits'][0])], [], -1) + await split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0])], [], -1) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False @@ -2204,6 +2270,7 @@ async def 
test_telemetry_method_latency_async(self, mocker): factory = SplitFactoryAsync(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), @@ -2260,6 +2327,7 @@ async def synchronize_config(*_): async def test_telemetry_track_exception_async(self, mocker): split_storage = InMemorySplitStorageAsync() segment_storage = mocker.Mock(spec=SegmentStorage) + rb_segment_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) impression_storage = mocker.Mock(spec=ImpressionStorage) destroyed_property = mocker.PropertyMock() destroyed_property.return_value = False @@ -2275,6 +2343,7 @@ async def test_telemetry_track_exception_async(self, mocker): factory = SplitFactoryAsync(mocker.Mock(), {'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': impression_storage, 'events': event_storage}, mocker.Mock(), diff --git a/tests/client/test_input_validator.py b/tests/client/test_input_validator.py index 5afecdd4..81b1c06b 100644 --- a/tests/client/test_input_validator.py +++ b/tests/client/test_input_validator.py @@ -6,7 +6,7 @@ from splitio.client.client import CONTROL, Client, _LOGGER as _logger, ClientAsync from splitio.client.manager import SplitManager, SplitManagerAsync from splitio.client.key import Key -from splitio.storage import SplitStorage, EventStorage, ImpressionStorage, SegmentStorage +from splitio.storage import SplitStorage, EventStorage, ImpressionStorage, SegmentStorage, RuleBasedSegmentsStorage from splitio.storage.inmemmory import InMemoryTelemetryStorage, InMemoryTelemetryStorageAsync, \ InMemorySplitStorage, InMemorySplitStorageAsync from splitio.models.splits import Split @@ -40,6 +40,7 @@ def test_get_treatment(self, mocker): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 
'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -277,6 +278,7 @@ def _configs(treatment): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -551,6 +553,7 @@ def test_track(self, mocker): { 'splits': split_storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': events_storage_mock, }, @@ -825,6 +828,7 @@ def test_get_treatments(self, mocker): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -969,6 +973,7 @@ def test_get_treatments_with_config(self, mocker): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -1113,6 +1118,7 @@ def test_get_treatments_by_flag_set(self, mocker): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -1228,6 +1234,7 @@ def test_get_treatments_by_flag_sets(self, mocker): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -1353,6 +1360,7 @@ def _configs(treatment): { 'splits': storage_mock, 'segments': 
mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -1472,6 +1480,7 @@ def _configs(treatment): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -1624,6 +1633,7 @@ async def get_change_number(*_): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -1880,6 +1890,7 @@ async def get_change_number(*_): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -2123,6 +2134,7 @@ async def put(*_): { 'splits': split_storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': events_storage_mock, }, @@ -2407,6 +2419,7 @@ async def fetch_many(*_): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -2565,6 +2578,7 @@ async def fetch_many(*_): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -2726,6 +2740,7 @@ async def get_feature_flags_by_sets(*_): { 'splits': 
storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -2865,6 +2880,7 @@ async def get_feature_flags_by_sets(*_): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -3014,6 +3030,7 @@ async def get_feature_flags_by_sets(*_): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -3156,6 +3173,7 @@ async def get_feature_flags_by_sets(*_): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -3312,6 +3330,7 @@ def test_split_(self, mocker): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -3388,6 +3407,7 @@ async def get(*_): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), + 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, diff --git a/tests/client/test_localhost.py b/tests/client/test_localhost.py index 280e79f9..598d6100 100644 --- a/tests/client/test_localhost.py +++ b/tests/client/test_localhost.py @@ -6,7 +6,7 @@ from splitio.sync.split import LocalSplitSynchronizer from splitio.models.splits import Split from 
splitio.models.grammar.matchers import AllKeysMatcher -from splitio.storage import SplitStorage +from splitio.storage import SplitStorage, RuleBasedSegmentsStorage class LocalHostStoragesTests(object): @@ -112,10 +112,10 @@ def test_update_splits(self, mocker): parse_yaml.return_value = {} storage_mock = mocker.Mock(spec=SplitStorage) storage_mock.get_split_names.return_value = [] - + rbs = mocker.Mock(spec=RuleBasedSegmentsStorage) parse_legacy.reset_mock() parse_yaml.reset_mock() - sync = LocalSplitSynchronizer('something', storage_mock) + sync = LocalSplitSynchronizer('something', storage_mock, rbs) sync._read_feature_flags_from_legacy_file = parse_legacy sync._read_feature_flags_from_yaml_file = parse_yaml sync.synchronize_splits() @@ -124,7 +124,7 @@ def test_update_splits(self, mocker): parse_legacy.reset_mock() parse_yaml.reset_mock() - sync = LocalSplitSynchronizer('something.yaml', storage_mock) + sync = LocalSplitSynchronizer('something.yaml', storage_mock, rbs) sync._read_feature_flags_from_legacy_file = parse_legacy sync._read_feature_flags_from_yaml_file = parse_yaml sync.synchronize_splits() @@ -133,7 +133,7 @@ def test_update_splits(self, mocker): parse_legacy.reset_mock() parse_yaml.reset_mock() - sync = LocalSplitSynchronizer('something.yml', storage_mock) + sync = LocalSplitSynchronizer('something.yml', storage_mock, rbs) sync._read_feature_flags_from_legacy_file = parse_legacy sync._read_feature_flags_from_yaml_file = parse_yaml sync.synchronize_splits() @@ -142,7 +142,7 @@ def test_update_splits(self, mocker): parse_legacy.reset_mock() parse_yaml.reset_mock() - sync = LocalSplitSynchronizer('something.YAML', storage_mock) + sync = LocalSplitSynchronizer('something.YAML', storage_mock, rbs) sync._read_feature_flags_from_legacy_file = parse_legacy sync._read_feature_flags_from_yaml_file = parse_yaml sync.synchronize_splits() @@ -151,7 +151,7 @@ def test_update_splits(self, mocker): parse_legacy.reset_mock() parse_yaml.reset_mock() - sync = 
LocalSplitSynchronizer('yaml', storage_mock) + sync = LocalSplitSynchronizer('yaml', storage_mock, rbs) sync._read_feature_flags_from_legacy_file = parse_legacy sync._read_feature_flags_from_yaml_file = parse_yaml sync.synchronize_splits() diff --git a/tests/client/test_manager.py b/tests/client/test_manager.py index ae856f9a..19e1bbb0 100644 --- a/tests/client/test_manager.py +++ b/tests/client/test_manager.py @@ -26,8 +26,8 @@ def test_manager_calls(self, mocker): factory.ready = True manager = SplitManager(factory) - split1 = splits.from_raw(splits_json["splitChange1_1"]["splits"][0]) - split2 = splits.from_raw(splits_json["splitChange1_3"]["splits"][0]) + split1 = splits.from_raw(splits_json["splitChange1_1"]['ff']['d'][0]) + split2 = splits.from_raw(splits_json["splitChange1_3"]['ff']['d'][0]) storage.update([split1, split2], [], -1) manager._storage = storage @@ -98,8 +98,8 @@ async def test_manager_calls(self, mocker): factory.ready = True manager = SplitManagerAsync(factory) - split1 = splits.from_raw(splits_json["splitChange1_1"]["splits"][0]) - split2 = splits.from_raw(splits_json["splitChange1_3"]["splits"][0]) + split1 = splits.from_raw(splits_json["splitChange1_1"]['ff']['d'][0]) + split2 = splits.from_raw(splits_json["splitChange1_3"]['ff']['d'][0]) await storage.update([split1, split2], [], -1) manager._storage = storage diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py index ab6e3293..124f5b37 100644 --- a/tests/integration/__init__.py +++ b/tests/integration/__init__.py @@ -1,53 +1,57 @@ -split11 = {"splits": [ +import copy + +rbsegments_json = [{ + "segment1": {"changeNumber": 12, "name": "some_segment", "status": "ACTIVE","trafficTypeName": "user","excluded":{"keys":[],"segments":[]},"conditions": []} +}] + +split11 = {"ff": {"t": 1675443569027, "s": -1, "d": [ {"trafficTypeName": "user", "name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779, "seed": -113875324, "status": "ACTIVE","killed": False, 
"defaultTreatment": "off", "changeNumber": 1675443569027,"algo": 2, "configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}], "sets": ["set_1"], "impressionsDisabled": False}, {"trafficTypeName": "user", "name": "SPLIT_1", "trafficAllocation": 100, "trafficAllocationSeed": -1780071202,"seed": -1442762199, "status": "ACTIVE","killed": False, "defaultTreatment": "off", "changeNumber": 1675443537882,"algo": 2, "configurations": {},"conditions": [{"conditionType": "ROLLOUT", "matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 0 },{ "treatment": "off", "size": 100 }],"label": "default rule"}], "sets": ["set_1", "set_2"]}, {"trafficTypeName": "user", "name": "SPLIT_3","trafficAllocation": 100,"trafficAllocationSeed": 1057590779, "seed": -113875324, "status": "ACTIVE","killed": False, "defaultTreatment": "off", "changeNumber": 1675443569027,"algo": 2, "configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": 
None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}], "sets": ["set_1"], "impressionsDisabled": True} - ],"since": -1,"till": 1675443569027} -split12 = {"splits": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": True,"defaultTreatment": "off","changeNumber": 1675443767288,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"since": 1675443569027,"till": 167544376728} -split13 = {"splits": [ + ]}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}} +split12 = {"ff": {"s": 1675443569027,"t": 167544376728, "d": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": True,"defaultTreatment": "off","changeNumber": 1675443767288,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ 
"treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}]}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}} +split13 = {"ff": {"s": 1675443767288,"t": 1675443984594, "d": [ {"trafficTypeName": "user","name": "SPLIT_1","trafficAllocation": 100,"trafficAllocationSeed": -1780071202,"seed": -1442762199,"status": "ARCHIVED","killed": False,"defaultTreatment": "off","changeNumber": 1675443984594,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 0 },{ "treatment": "off", "size": 100 }],"label": "default rule"}]}, {"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": False,"defaultTreatment": "off","changeNumber": 1675443954220,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]} - ],"since": 1675443767288,"till": 1675443984594} + ]}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}} -split41 = split11 -split42 = split12 -split43 = split13 -split41["since"] = None -split41["till"] = None -split42["since"] = None -split42["till"] 
= None -split43["since"] = None -split43["till"] = None +split41 = {"ff": {"t": None, "s": None, "d": split11['ff']['d']}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}} +split42 = {"ff": {"t": None, "s": None, "d": split12['ff']['d']}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}} +split43 = {"ff": {"t": None, "s": None, "d": split13['ff']['d']}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}} -split61 = split11 -split62 = split12 -split63 = split13 - -split61["since"] = -1 -split61["till"] = -1 -split62["since"] = -1 -split62["till"] = -1 -split63["since"] = -1 -split63["till"] = -1 +split61 = {"ff": {"t": -1, "s": -1, "d": split11['ff']['d']}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}} +split62 = {"ff": {"t": -1, "s": -1, "d": split12['ff']['d']}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}} +split63 = {"ff": {"t": -1, "s": -1, "d": split13['ff']['d']}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}} splits_json = { "splitChange1_1": split11, "splitChange1_2": split12, "splitChange1_3": split13, - "splitChange2_1": {"splits": [{"name": "SPLIT_1","status": "ACTIVE","killed": False,"defaultTreatment": "off","configurations": {},"conditions": []}]}, - "splitChange3_1": {"splits": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": False,"defaultTreatment": "off","changeNumber": 1675443569027,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"since": 
-1,"till": 1675443569027}, - "splitChange3_2": {"splits": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": True,"defaultTreatment": "off","changeNumber": 1675443767288,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"since": 1675443569027,"till": 1675443569027}, + "splitChange2_1": {"ff": {"t": -1, "s": -1, "d": [{"name": "SPLIT_1","status": "ACTIVE","killed": False,"defaultTreatment": "off","configurations": {},"conditions": []}]}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}}, + "splitChange3_1": {"ff": {"t": -1, "s": -1, "d": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": False,"defaultTreatment": "off","changeNumber": 1675443569027,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"since": -1,"till": 1675443569027}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}}, 
+ "splitChange3_2": {"ff": {"t": -1, "s": -1, "d": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": True,"defaultTreatment": "off","changeNumber": 1675443767288,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"since": 1675443569027,"till": 1675443569027}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}}, "splitChange4_1": split41, "splitChange4_2": split42, "splitChange4_3": split43, - "splitChange5_1": {"splits": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": False,"defaultTreatment": "off","changeNumber": 1675443569027,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"since": -1,"till": 1675443569027}, - "splitChange5_2": {"splits": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": 
"ACTIVE","killed": True,"defaultTreatment": "off","changeNumber": 1675443767288,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"since": 1675443569026,"till": 1675443569026}, + "splitChange5_1": {"ff": {"t": -1, "s": -1, "d": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": False,"defaultTreatment": "off","changeNumber": 1675443569027,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"since": -1,"till": 1675443569027}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}}, + "splitChange5_2": {"ff": {"t": -1, "s": -1, "d": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": True,"defaultTreatment": "off","changeNumber": 1675443767288,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { 
"trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"since": 1675443569026,"till": 1675443569026}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}}, "splitChange6_1": split61, "splitChange6_2": split62, "splitChange6_3": split63, -} - -rbsegments_json = { - "segment1": {"changeNumber": 12, "name": "some_segment", "status": "ACTIVE","trafficTypeName": "user","excluded":{"keys":[],"segments":[]},"conditions": []} + "splitChange7_1": {"ff": { + "t": -1, + "s": -1, + "d": [{"changeNumber": 10,"trafficTypeName": "user","name": "rbs_feature_flag","trafficAllocation": 100,"trafficAllocationSeed": 1828377380,"seed": -286617921,"status": "ACTIVE","killed": False,"defaultTreatment": "off","algo": 2, + "conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": {"trafficType": "user"},"matcherType": "IN_RULE_BASED_SEGMENT","negate": False,"userDefinedSegmentMatcherData": {"segmentName": "sample_rule_based_segment"}}]},"partitions": [{"treatment": "on","size": 100},{"treatment": "off","size": 0}],"label": "in rule based segment sample_rule_based_segment"},{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": {"trafficType": "user"},"matcherType": "ALL_KEYS","negate": False}]},"partitions": [{"treatment": "on","size": 0},{"treatment": "off","size": 100}],"label": "default rule"}], + "configurations": {}, + "sets": [], + "impressionsDisabled": False + }] + }, "rbs": { + "t": 1675259356568, + "s": -1, + "d": [{"changeNumber": 5,"name": "sample_rule_based_segment","status": "ACTIVE","trafficTypeName": 
"user","excluded":{"keys":["mauro@split.io","gaston@split.io"],"segments":[]}, + "conditions": [{"matcherGroup": {"combiner": "AND","matchers": [{"keySelector": {"trafficType": "user","attribute": "email"},"matcherType": "ENDS_WITH","negate": False,"whitelistMatcherData": {"whitelist": ["@split.io"]}}]}}]} + ]}} } \ No newline at end of file diff --git a/tests/integration/files/splitChanges.json b/tests/integration/files/splitChanges.json index 9125481d..d9ab1c24 100644 --- a/tests/integration/files/splitChanges.json +++ b/tests/integration/files/splitChanges.json @@ -1,5 +1,6 @@ { - "splits": [ + "ff": { + "d": [ { "orgId": null, "environment": null, @@ -321,8 +322,111 @@ } ], "sets": [] - } + }, + { + "changeNumber": 10, + "trafficTypeName": "user", + "name": "rbs_feature_flag", + "trafficAllocation": 100, + "trafficAllocationSeed": 1828377380, + "seed": -286617921, + "status": "ACTIVE", + "killed": false, + "defaultTreatment": "off", + "algo": 2, + "conditions": [ + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user" + }, + "matcherType": "IN_RULE_BASED_SEGMENT", + "negate": false, + "userDefinedSegmentMatcherData": { + "segmentName": "sample_rule_based_segment" + } + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + }, + { + "treatment": "off", + "size": 0 + } + ], + "label": "in rule based segment sample_rule_based_segment" + }, + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user" + }, + "matcherType": "ALL_KEYS", + "negate": false + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 0 + }, + { + "treatment": "off", + "size": 100 + } + ], + "label": "default rule" + } + ], + "configurations": {}, + "sets": [], + "impressionsDisabled": false + } ], - "since": -1, - "till": 1457726098069 -} + "s": -1, + "t": 1457726098069 +}, "rbs": {"t": -1, "s": -1, "d": [{ + 
"changeNumber": 123, + "name": "sample_rule_based_segment", + "status": "ACTIVE", + "trafficTypeName": "user", + "excluded":{ + "keys":["mauro@split.io","gaston@split.io"], + "segments":[] + }, + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": "email" + }, + "matcherType": "ENDS_WITH", + "negate": false, + "whitelistMatcherData": { + "whitelist": [ + "@split.io" + ] + } + } + ] + } + } + ] +}]}} diff --git a/tests/integration/files/split_changes.json b/tests/integration/files/split_changes.json index 6084b108..f0708043 100644 --- a/tests/integration/files/split_changes.json +++ b/tests/integration/files/split_changes.json @@ -1,5 +1,6 @@ { - "splits": [ + "ff": { + "d": [ { "orgId": null, "environment": null, @@ -323,6 +324,7 @@ "sets": [] } ], - "since": -1, - "till": 1457726098069 + "s": -1, + "t": 1457726098069 +}, "rbs": {"t": -1, "s": -1, "d": []} } diff --git a/tests/integration/files/split_changes_temp.json b/tests/integration/files/split_changes_temp.json index 162c0b17..64575226 100644 --- a/tests/integration/files/split_changes_temp.json +++ b/tests/integration/files/split_changes_temp.json @@ -1 +1 @@ -{"splits": [{"trafficTypeName": "user", "name": "SPLIT_1", "trafficAllocation": 100, "trafficAllocationSeed": -1780071202, "seed": -1442762199, "status": "ARCHIVED", "killed": false, "defaultTreatment": "off", "changeNumber": 1675443984594, "algo": 2, "configurations": {}, "conditions": [{"conditionType": "ROLLOUT", "matcherGroup": {"combiner": "AND", "matchers": [{"keySelector": {"trafficType": "user", "attribute": null}, "matcherType": "ALL_KEYS", "negate": false, "userDefinedSegmentMatcherData": null, "whitelistMatcherData": null, "unaryNumericMatcherData": null, "betweenMatcherData": null, "booleanMatcherData": null, "dependencyMatcherData": null, "stringMatcherData": null}]}, "partitions": [{"treatment": "on", "size": 0}, {"treatment": "off", "size": 100}], 
"label": "default rule"}]}, {"trafficTypeName": "user", "name": "SPLIT_2", "trafficAllocation": 100, "trafficAllocationSeed": 1057590779, "seed": -113875324, "status": "ACTIVE", "killed": false, "defaultTreatment": "off", "changeNumber": 1675443954220, "algo": 2, "configurations": {}, "conditions": [{"conditionType": "ROLLOUT", "matcherGroup": {"combiner": "AND", "matchers": [{"keySelector": {"trafficType": "user", "attribute": null}, "matcherType": "ALL_KEYS", "negate": false, "userDefinedSegmentMatcherData": null, "whitelistMatcherData": null, "unaryNumericMatcherData": null, "betweenMatcherData": null, "booleanMatcherData": null, "dependencyMatcherData": null, "stringMatcherData": null}]}, "partitions": [{"treatment": "on", "size": 100}, {"treatment": "off", "size": 0}], "label": "default rule"}]}], "since": -1, "till": -1} \ No newline at end of file +{"ff": {"t": -1, "s": -1, "d": [{"changeNumber": 10, "trafficTypeName": "user", "name": "rbs_feature_flag", "trafficAllocation": 100, "trafficAllocationSeed": 1828377380, "seed": -286617921, "status": "ACTIVE", "killed": false, "defaultTreatment": "off", "algo": 2, "conditions": [{"conditionType": "ROLLOUT", "matcherGroup": {"combiner": "AND", "matchers": [{"keySelector": {"trafficType": "user"}, "matcherType": "IN_RULE_BASED_SEGMENT", "negate": false, "userDefinedSegmentMatcherData": {"segmentName": "sample_rule_based_segment"}}]}, "partitions": [{"treatment": "on", "size": 100}, {"treatment": "off", "size": 0}], "label": "in rule based segment sample_rule_based_segment"}, {"conditionType": "ROLLOUT", "matcherGroup": {"combiner": "AND", "matchers": [{"keySelector": {"trafficType": "user"}, "matcherType": "ALL_KEYS", "negate": false}]}, "partitions": [{"treatment": "on", "size": 0}, {"treatment": "off", "size": 100}], "label": "default rule"}], "configurations": {}, "sets": [], "impressionsDisabled": false}]}, "rbs": {"t": 1675259356568, "s": -1, "d": [{"changeNumber": 5, "name": "sample_rule_based_segment", 
"status": "ACTIVE", "trafficTypeName": "user", "excluded": {"keys": ["mauro@split.io", "gaston@split.io"], "segments": []}, "conditions": [{"matcherGroup": {"combiner": "AND", "matchers": [{"keySelector": {"trafficType": "user", "attribute": "email"}, "matcherType": "ENDS_WITH", "negate": false, "whitelistMatcherData": {"whitelist": ["@split.io"]}}]}}]}]}} \ No newline at end of file diff --git a/tests/integration/test_client_e2e.py b/tests/integration/test_client_e2e.py index 94a11624..c8a6a666 100644 --- a/tests/integration/test_client_e2e.py +++ b/tests/integration/test_client_e2e.py @@ -15,15 +15,17 @@ from splitio.storage.inmemmory import InMemoryEventStorage, InMemoryImpressionStorage, \ InMemorySegmentStorage, InMemorySplitStorage, InMemoryTelemetryStorage, InMemorySplitStorageAsync,\ InMemoryEventStorageAsync, InMemoryImpressionStorageAsync, InMemorySegmentStorageAsync, \ - InMemoryTelemetryStorageAsync + InMemoryTelemetryStorageAsync, InMemoryRuleBasedSegmentStorage, InMemoryRuleBasedSegmentStorageAsync from splitio.storage.redis import RedisEventsStorage, RedisImpressionsStorage, \ RedisSplitStorage, RedisSegmentStorage, RedisTelemetryStorage, RedisEventsStorageAsync,\ - RedisImpressionsStorageAsync, RedisSegmentStorageAsync, RedisSplitStorageAsync, RedisTelemetryStorageAsync + RedisImpressionsStorageAsync, RedisSegmentStorageAsync, RedisSplitStorageAsync, RedisTelemetryStorageAsync, \ + RedisRuleBasedSegmentsStorage, RedisRuleBasedSegmentsStorageAsync from splitio.storage.pluggable import PluggableEventsStorage, PluggableImpressionsStorage, PluggableSegmentStorage, \ PluggableTelemetryStorage, PluggableSplitStorage, PluggableEventsStorageAsync, PluggableImpressionsStorageAsync, \ - PluggableSegmentStorageAsync, PluggableSplitStorageAsync, PluggableTelemetryStorageAsync + PluggableSegmentStorageAsync, PluggableSplitStorageAsync, PluggableTelemetryStorageAsync, \ + PluggableRuleBasedSegmentsStorage, PluggableRuleBasedSegmentsStorageAsync from 
splitio.storage.adapters.redis import build, RedisAdapter, RedisAdapterAsync, build_async -from splitio.models import splits, segments +from splitio.models import splits, segments, rule_based_segments from splitio.engine.impressions.impressions import Manager as ImpressionsManager, ImpressionsMode from splitio.engine.impressions import set_classes, set_classes_async from splitio.engine.impressions.strategies import StrategyDebugMode, StrategyOptimizedMode, StrategyNoneMode @@ -154,6 +156,16 @@ def _get_treatment(factory): if not isinstance(factory._recorder._impressions_manager._strategy, StrategyNoneMode): _validate_last_impressions(client, ('regex_test', 'abc4', 'on')) + # test rule based segment matcher + assert client.get_treatment('bilal@split.io', 'rbs_feature_flag', {'email': 'bilal@split.io'}) == 'on' + if not isinstance(factory._recorder._impressions_manager._strategy, StrategyNoneMode): + _validate_last_impressions(client, ('rbs_feature_flag', 'bilal@split.io', 'on')) + + # test rule based segment matcher + assert client.get_treatment('mauro@split.io', 'rbs_feature_flag', {'email': 'mauro@split.io'}) == 'off' + if not isinstance(factory._recorder._impressions_manager._strategy, StrategyNoneMode): + _validate_last_impressions(client, ('rbs_feature_flag', 'mauro@split.io', 'off')) + def _get_treatment_with_config(factory): """Test client.get_treatment_with_config().""" try: @@ -438,8 +450,8 @@ def _manager_methods(factory): assert result.change_number == 123 assert result.configs['on'] == '{"size":15,"test":20}' - assert len(manager.split_names()) == 7 - assert len(manager.splits()) == 7 + assert len(manager.split_names()) == 8 + assert len(manager.splits()) == 8 class InMemoryDebugIntegrationTests(object): """Inmemory storage-based integration tests.""" @@ -448,13 +460,17 @@ def setup_method(self): """Prepare storages with test data.""" split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() + rb_segment_storage = 
InMemoryRuleBasedSegmentStorage() split_fn = os.path.join(os.path.dirname(__file__), 'files', 'splitChanges.json') with open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: split_storage.update([splits.from_raw(split)], [], 0) + for rbs in data['rbs']['d']: + rb_segment_storage.update([rule_based_segments.from_raw(rbs)], [], 0) + segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentEmployeesChanges.json') with open(segment_fn, 'r') as flo: data = json.loads(flo.read()) @@ -473,6 +489,7 @@ def setup_method(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': InMemoryImpressionStorage(5000, telemetry_runtime_producer), 'events': InMemoryEventStorage(5000, telemetry_runtime_producer), } @@ -604,13 +621,16 @@ def setup_method(self): """Prepare storages with test data.""" split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() - + rb_segment_storage = InMemoryRuleBasedSegmentStorage() split_fn = os.path.join(os.path.dirname(__file__), 'files', 'splitChanges.json') with open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: split_storage.update([splits.from_raw(split)], [], 0) + for rbs in data['rbs']['d']: + rb_segment_storage.update([rule_based_segments.from_raw(rbs)], [], 0) + segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentEmployeesChanges.json') with open(segment_fn, 'r') as flo: data = json.loads(flo.read()) @@ -629,6 +649,7 @@ def setup_method(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': InMemoryImpressionStorage(5000, telemetry_runtime_producer), 'events': InMemoryEventStorage(5000, telemetry_runtime_producer), } @@ -733,16 +754,20 @@ def setup_method(self): redis_client = build(DEFAULT_CONFIG.copy()) 
split_storage = RedisSplitStorage(redis_client) segment_storage = RedisSegmentStorage(redis_client) + rb_segment_storage = RedisRuleBasedSegmentsStorage(redis_client) split_fn = os.path.join(os.path.dirname(__file__), 'files', 'splitChanges.json') with open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: redis_client.set(split_storage._get_key(split['name']), json.dumps(split)) if split.get('sets') is not None: for flag_set in split.get('sets'): redis_client.sadd(split_storage._get_flag_set_key(flag_set), split['name']) - redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, data['till']) + redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, data['ff']['t']) + + for rbs in data['rbs']['d']: + redis_client.set(rb_segment_storage._get_key(rbs['name']), json.dumps(rbs)) segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentEmployeesChanges.json') with open(segment_fn, 'r') as flo: @@ -763,6 +788,7 @@ def setup_method(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': RedisImpressionsStorage(redis_client, metadata), 'events': RedisEventsStorage(redis_client, metadata), } @@ -899,7 +925,10 @@ def teardown_method(self): "SPLITIO.split.set.set1", "SPLITIO.split.set.set2", "SPLITIO.split.set.set3", - "SPLITIO.split.set.set4" + "SPLITIO.split.set.set4", + "SPLITIO.split.rbs_feature_flag", + "SPLITIO.rbsegments.till", + "SPLITIO.rbsegments.sample_rule_based_segment" ] redis_client = RedisAdapter(StrictRedis()) @@ -915,13 +944,17 @@ def setup_method(self): redis_client = build(DEFAULT_CONFIG.copy()) split_storage = RedisSplitStorage(redis_client, True) segment_storage = RedisSegmentStorage(redis_client) + rb_segment_storage = RedisRuleBasedSegmentsStorage(redis_client) split_fn = os.path.join(os.path.dirname(__file__), 'files', 'splitChanges.json') with open(split_fn, 'r') as flo: data = 
json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: redis_client.set(split_storage._get_key(split['name']), json.dumps(split)) - redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, data['till']) + redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, data['ff']['t']) + + for rbs in data['rbs']['d']: + redis_client.set(rb_segment_storage._get_key(rbs['name']), json.dumps(rbs)) segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentEmployeesChanges.json') with open(segment_fn, 'r') as flo: @@ -943,6 +976,7 @@ def setup_method(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': RedisImpressionsStorage(redis_client, metadata), 'events': RedisEventsStorage(redis_client, metadata), } @@ -986,7 +1020,7 @@ def test_localhost_json_e2e(self): assert sorted(self.factory.manager().split_names()) == ["SPLIT_1", "SPLIT_2", "SPLIT_3"] assert client.get_treatment("key", "SPLIT_1", None) == 'off' - assert client.get_treatment("key", "SPLIT_2", None) == 'off' + assert client.get_treatment("key", "SPLIT_2", None) == 'on' #?? self._update_temp_file(splits_json['splitChange1_3']) self._synchronize_now() @@ -1044,7 +1078,7 @@ def test_localhost_json_e2e(self): self._synchronize_now() assert sorted(self.factory.manager().split_names()) == ["SPLIT_2", "SPLIT_3"] - assert client.get_treatment("key", "SPLIT_2", None) == 'on' + assert client.get_treatment("key", "SPLIT_2", None) == 'off' #?? 
# Tests 6 self.factory._storages['splits'].update([], ['SPLIT_2'], -1) @@ -1069,6 +1103,12 @@ def test_localhost_json_e2e(self): assert client.get_treatment("key", "SPLIT_1", None) == 'control' assert client.get_treatment("key", "SPLIT_2", None) == 'on' + # rule based segment test + self._update_temp_file(splits_json['splitChange7_1']) + self._synchronize_now() + assert client.get_treatment('bilal@split.io', 'rbs_feature_flag', {'email': 'bilal@split.io'}) == 'on' + assert client.get_treatment('mauro@split.io', 'rbs_feature_flag', {'email': 'mauro@split.io'}) == 'off' + def _update_temp_file(self, json_body): f = open(os.path.join(os.path.dirname(__file__), 'files','split_changes_temp.json'), 'w') f.write(json.dumps(json_body)) @@ -1106,7 +1146,6 @@ def test_incorrect_file_e2e(self): factory.destroy(event) event.wait() - def test_localhost_e2e(self): """Instantiate a client with a YAML file and issue get_treatment() calls.""" filename = os.path.join(os.path.dirname(__file__), 'files', 'file2.yaml') @@ -1136,7 +1175,6 @@ def test_localhost_e2e(self): factory.destroy(event) event.wait() - class PluggableIntegrationTests(object): """Pluggable storage-based integration tests.""" @@ -1146,6 +1184,7 @@ def setup_method(self): self.pluggable_storage_adapter = StorageMockAdapter() split_storage = PluggableSplitStorage(self.pluggable_storage_adapter) segment_storage = PluggableSegmentStorage(self.pluggable_storage_adapter) + rb_segment_storage = PluggableRuleBasedSegmentsStorage(self.pluggable_storage_adapter) telemetry_pluggable_storage = PluggableTelemetryStorage(self.pluggable_storage_adapter, metadata) telemetry_producer = TelemetryStorageProducer(telemetry_pluggable_storage) @@ -1155,6 +1194,7 @@ def setup_method(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': PluggableImpressionsStorage(self.pluggable_storage_adapter, metadata), 'events': 
PluggableEventsStorage(self.pluggable_storage_adapter, metadata), 'telemetry': telemetry_pluggable_storage @@ -1178,12 +1218,15 @@ def setup_method(self): split_fn = os.path.join(os.path.dirname(__file__), 'files', 'splitChanges.json') with open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: self.pluggable_storage_adapter.set(split_storage._prefix.format(feature_flag_name=split['name']), split) if split.get('sets') is not None: for flag_set in split.get('sets'): self.pluggable_storage_adapter.push_items(split_storage._flag_set_prefix.format(flag_set=flag_set), split['name']) - self.pluggable_storage_adapter.set(split_storage._feature_flag_till_prefix, data['till']) + self.pluggable_storage_adapter.set(split_storage._feature_flag_till_prefix, data['ff']['t']) + + for rbs in data['rbs']['d']: + self.pluggable_storage_adapter.set(rb_segment_storage._prefix.format(segment_name=rbs['name']), rbs) segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentEmployeesChanges.json') with open(segment_fn, 'r') as flo: @@ -1319,7 +1362,10 @@ def teardown_method(self): "SPLITIO.split.set.set1", "SPLITIO.split.set.set2", "SPLITIO.split.set.set3", - "SPLITIO.split.set.set4" + "SPLITIO.split.set.set4", + "SPLITIO.split.rbs_feature_flag", + "SPLITIO.rbsegments.till", + "SPLITIO.rbsegments.sample_rule_based_segment" ] for key in keys_to_delete: self.pluggable_storage_adapter.delete(key) @@ -1333,6 +1379,7 @@ def setup_method(self): self.pluggable_storage_adapter = StorageMockAdapter() split_storage = PluggableSplitStorage(self.pluggable_storage_adapter) segment_storage = PluggableSegmentStorage(self.pluggable_storage_adapter) + rb_segment_storage = PluggableRuleBasedSegmentsStorage(self.pluggable_storage_adapter) telemetry_pluggable_storage = PluggableTelemetryStorage(self.pluggable_storage_adapter, metadata) telemetry_producer = TelemetryStorageProducer(telemetry_pluggable_storage) @@ -1342,6 +1389,7 @@ 
def setup_method(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': PluggableImpressionsStorage(self.pluggable_storage_adapter, metadata), 'events': PluggableEventsStorage(self.pluggable_storage_adapter, metadata), 'telemetry': telemetry_pluggable_storage @@ -1365,12 +1413,15 @@ def setup_method(self): split_fn = os.path.join(os.path.dirname(__file__), 'files', 'splitChanges.json') with open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: if split.get('sets') is not None: for flag_set in split.get('sets'): self.pluggable_storage_adapter.push_items(split_storage._flag_set_prefix.format(flag_set=flag_set), split['name']) self.pluggable_storage_adapter.set(split_storage._prefix.format(feature_flag_name=split['name']), split) - self.pluggable_storage_adapter.set(split_storage._feature_flag_till_prefix, data['till']) + self.pluggable_storage_adapter.set(split_storage._feature_flag_till_prefix, data['ff']['t']) + + for rbs in data['rbs']['d']: + self.pluggable_storage_adapter.set(rb_segment_storage._prefix.format(segment_name=rbs['name']), rbs) segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentEmployeesChanges.json') with open(segment_fn, 'r') as flo: @@ -1483,7 +1534,10 @@ def teardown_method(self): "SPLITIO.split.set.set1", "SPLITIO.split.set.set2", "SPLITIO.split.set.set3", - "SPLITIO.split.set.set4" + "SPLITIO.split.set.set4", + "SPLITIO.split.rbs_feature_flag", + "SPLITIO.rbsegments.till", + "SPLITIO.rbsegments.sample_rule_based_segment" ] for key in keys_to_delete: self.pluggable_storage_adapter.delete(key) @@ -1497,7 +1551,7 @@ def setup_method(self): self.pluggable_storage_adapter = StorageMockAdapter() split_storage = PluggableSplitStorage(self.pluggable_storage_adapter) segment_storage = PluggableSegmentStorage(self.pluggable_storage_adapter) - + rb_segment_storage = 
PluggableRuleBasedSegmentsStorage(self.pluggable_storage_adapter) telemetry_pluggable_storage = PluggableTelemetryStorage(self.pluggable_storage_adapter, metadata) telemetry_producer = TelemetryStorageProducer(telemetry_pluggable_storage) telemetry_runtime_producer = telemetry_producer.get_telemetry_runtime_producer() @@ -1506,6 +1560,7 @@ def setup_method(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': PluggableImpressionsStorage(self.pluggable_storage_adapter, metadata), 'events': PluggableEventsStorage(self.pluggable_storage_adapter, metadata), 'telemetry': telemetry_pluggable_storage @@ -1552,12 +1607,15 @@ def setup_method(self): split_fn = os.path.join(os.path.dirname(__file__), 'files', 'splitChanges.json') with open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: if split.get('sets') is not None: for flag_set in split.get('sets'): self.pluggable_storage_adapter.push_items(split_storage._flag_set_prefix.format(flag_set=flag_set), split['name']) self.pluggable_storage_adapter.set(split_storage._prefix.format(feature_flag_name=split['name']), split) - self.pluggable_storage_adapter.set(split_storage._feature_flag_till_prefix, data['till']) + self.pluggable_storage_adapter.set(split_storage._feature_flag_till_prefix, data['ff']['t']) + + for rbs in data['rbs']['d']: + self.pluggable_storage_adapter.set(rb_segment_storage._prefix.format(segment_name=rbs['name']), rbs) segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentEmployeesChanges.json') with open(segment_fn, 'r') as flo: @@ -1668,9 +1726,9 @@ def test_optimized(self): split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() - split_storage.update([splits.from_raw(splits_json['splitChange1_1']['splits'][0]), - splits.from_raw(splits_json['splitChange1_1']['splits'][1]), - 
splits.from_raw(splits_json['splitChange1_1']['splits'][2]) + split_storage.update([splits.from_raw(splits_json['splitChange1_1']['ff']['d'][0]), + splits.from_raw(splits_json['splitChange1_1']['ff']['d'][1]), + splits.from_raw(splits_json['splitChange1_1']['ff']['d'][2]) ], [], -1) telemetry_storage = InMemoryTelemetryStorage() @@ -1681,6 +1739,7 @@ def test_optimized(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': InMemoryRuleBasedSegmentStorage(), 'impressions': InMemoryImpressionStorage(5000, telemetry_runtime_producer), 'events': InMemoryEventStorage(5000, telemetry_runtime_producer), } @@ -1722,9 +1781,9 @@ def test_debug(self): split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() - split_storage.update([splits.from_raw(splits_json['splitChange1_1']['splits'][0]), - splits.from_raw(splits_json['splitChange1_1']['splits'][1]), - splits.from_raw(splits_json['splitChange1_1']['splits'][2]) + split_storage.update([splits.from_raw(splits_json['splitChange1_1']['ff']['d'][0]), + splits.from_raw(splits_json['splitChange1_1']['ff']['d'][1]), + splits.from_raw(splits_json['splitChange1_1']['ff']['d'][2]) ], [], -1) telemetry_storage = InMemoryTelemetryStorage() @@ -1735,6 +1794,7 @@ def test_debug(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': InMemoryRuleBasedSegmentStorage(), 'impressions': InMemoryImpressionStorage(5000, telemetry_runtime_producer), 'events': InMemoryEventStorage(5000, telemetry_runtime_producer), } @@ -1776,9 +1836,9 @@ def test_none(self): split_storage = InMemorySplitStorage() segment_storage = InMemorySegmentStorage() - split_storage.update([splits.from_raw(splits_json['splitChange1_1']['splits'][0]), - splits.from_raw(splits_json['splitChange1_1']['splits'][1]), - splits.from_raw(splits_json['splitChange1_1']['splits'][2]) + split_storage.update([splits.from_raw(splits_json['splitChange1_1']['ff']['d'][0]), + 
splits.from_raw(splits_json['splitChange1_1']['ff']['d'][1]), + splits.from_raw(splits_json['splitChange1_1']['ff']['d'][2]) ], [], -1) telemetry_storage = InMemoryTelemetryStorage() @@ -1789,6 +1849,7 @@ def test_none(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': InMemoryRuleBasedSegmentStorage(), 'impressions': InMemoryImpressionStorage(5000, telemetry_runtime_producer), 'events': InMemoryEventStorage(5000, telemetry_runtime_producer), } @@ -1838,9 +1899,9 @@ def test_optimized(self): split_storage = RedisSplitStorage(redis_client, True) segment_storage = RedisSegmentStorage(redis_client) - redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][0]['name']), json.dumps(splits_json['splitChange1_1']['splits'][0])) - redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][1]['name']), json.dumps(splits_json['splitChange1_1']['splits'][1])) - redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][2]['name']), json.dumps(splits_json['splitChange1_1']['splits'][2])) + redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][0]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][0])) + redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][1]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][1])) + redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][2]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][2])) redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, -1) telemetry_redis_storage = RedisTelemetryStorage(redis_client, metadata) @@ -1851,6 +1912,7 @@ def test_optimized(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': RedisRuleBasedSegmentsStorage(redis_client), 'impressions': RedisImpressionsStorage(redis_client, metadata), 'events': RedisEventsStorage(redis_client, metadata), } 
@@ -1901,9 +1963,9 @@ def test_debug(self): split_storage = RedisSplitStorage(redis_client, True) segment_storage = RedisSegmentStorage(redis_client) - redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][0]['name']), json.dumps(splits_json['splitChange1_1']['splits'][0])) - redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][1]['name']), json.dumps(splits_json['splitChange1_1']['splits'][1])) - redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][2]['name']), json.dumps(splits_json['splitChange1_1']['splits'][2])) + redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][0]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][0])) + redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][1]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][1])) + redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][2]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][2])) redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, -1) telemetry_redis_storage = RedisTelemetryStorage(redis_client, metadata) @@ -1914,6 +1976,7 @@ def test_debug(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': RedisRuleBasedSegmentsStorage(redis_client), 'impressions': RedisImpressionsStorage(redis_client, metadata), 'events': RedisEventsStorage(redis_client, metadata), } @@ -1964,9 +2027,9 @@ def test_none(self): split_storage = RedisSplitStorage(redis_client, True) segment_storage = RedisSegmentStorage(redis_client) - redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][0]['name']), json.dumps(splits_json['splitChange1_1']['splits'][0])) - redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][1]['name']), json.dumps(splits_json['splitChange1_1']['splits'][1])) - 
redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][2]['name']), json.dumps(splits_json['splitChange1_1']['splits'][2])) + redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][0]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][0])) + redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][1]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][1])) + redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][2]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][2])) redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, -1) telemetry_redis_storage = RedisTelemetryStorage(redis_client, metadata) @@ -1977,6 +2040,7 @@ def test_none(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': RedisRuleBasedSegmentsStorage(redis_client), 'impressions': RedisImpressionsStorage(redis_client, metadata), 'events': RedisEventsStorage(redis_client, metadata), } @@ -2046,13 +2110,17 @@ async def _setup_method(self): """Prepare storages with test data.""" split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() + rb_segment_storage = InMemoryRuleBasedSegmentStorageAsync() split_fn = os.path.join(os.path.dirname(__file__), 'files', 'splitChanges.json') with open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: await split_storage.update([splits.from_raw(split)], [], -1) + for rbs in data['rbs']['d']: + await rb_segment_storage.update([rule_based_segments.from_raw(rbs)], [], 0) + segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentEmployeesChanges.json') with open(segment_fn, 'r') as flo: data = json.loads(flo.read()) @@ -2071,6 +2139,7 @@ async def _setup_method(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 
'impressions': InMemoryImpressionStorageAsync(5000, telemetry_runtime_producer), 'events': InMemoryEventStorageAsync(5000, telemetry_runtime_producer), } @@ -2212,13 +2281,16 @@ async def _setup_method(self): """Prepare storages with test data.""" split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() - + rb_segment_storage = InMemoryRuleBasedSegmentStorageAsync() split_fn = os.path.join(os.path.dirname(__file__), 'files', 'splitChanges.json') with open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: await split_storage.update([splits.from_raw(split)], [], -1) + for rbs in data['rbs']['d']: + await rb_segment_storage.update([rule_based_segments.from_raw(rbs)], [], 0) + segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentEmployeesChanges.json') with open(segment_fn, 'r') as flo: data = json.loads(flo.read()) @@ -2237,6 +2309,7 @@ async def _setup_method(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': InMemoryImpressionStorageAsync(5000, telemetry_runtime_producer), 'events': InMemoryEventStorageAsync(5000, telemetry_runtime_producer), } @@ -2364,17 +2437,20 @@ async def _setup_method(self): split_storage = RedisSplitStorageAsync(redis_client) segment_storage = RedisSegmentStorageAsync(redis_client) + rb_segment_storage = RedisRuleBasedSegmentsStorageAsync(redis_client) split_fn = os.path.join(os.path.dirname(__file__), 'files', 'splitChanges.json') with open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: await redis_client.set(split_storage._get_key(split['name']), json.dumps(split)) if split.get('sets') is not None: for flag_set in split.get('sets'): await redis_client.sadd(split_storage._get_flag_set_key(flag_set), split['name']) + await redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, 
data['ff']['t']) - await redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, data['till']) + for rbs in data['rbs']['d']: + await redis_client.set(rb_segment_storage._get_key(rbs['name']), json.dumps(rbs)) segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentEmployeesChanges.json') with open(segment_fn, 'r') as flo: @@ -2396,6 +2472,7 @@ async def _setup_method(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': RedisImpressionsStorageAsync(redis_client, metadata), 'events': RedisEventsStorageAsync(redis_client, metadata), } @@ -2560,7 +2637,10 @@ async def _clear_cache(self, redis_client): "SPLITIO.segment.employees.till", "SPLITIO.split.whitelist_feature", "SPLITIO.telemetry.latencies", - "SPLITIO.split.dependency_test" + "SPLITIO.split.dependency_test", + "SPLITIO.split.rbs_feature_flag", + "SPLITIO.rbsegments.till", + "SPLITIO.rbsegments.sample_rule_based_segment" ] for key in keys_to_delete: await redis_client.delete(key) @@ -2579,16 +2659,20 @@ async def _setup_method(self): split_storage = RedisSplitStorageAsync(redis_client, True) segment_storage = RedisSegmentStorageAsync(redis_client) + rb_segment_storage = RedisRuleBasedSegmentsStorageAsync(redis_client) split_fn = os.path.join(os.path.dirname(__file__), 'files', 'splitChanges.json') with open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: await redis_client.set(split_storage._get_key(split['name']), json.dumps(split)) if split.get('sets') is not None: for flag_set in split.get('sets'): await redis_client.sadd(split_storage._get_flag_set_key(flag_set), split['name']) - await redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, data['till']) + await redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, data['ff']['t']) + + for rbs in data['rbs']['d']: + await redis_client.set(rb_segment_storage._get_key(rbs['name']), json.dumps(rbs)) 
segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentEmployeesChanges.json') with open(segment_fn, 'r') as flo: @@ -2610,6 +2694,7 @@ async def _setup_method(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': RedisImpressionsStorageAsync(redis_client, metadata), 'events': RedisEventsStorageAsync(redis_client, metadata), } @@ -2659,7 +2744,7 @@ async def test_localhost_json_e2e(self): assert sorted(await self.factory.manager().split_names()) == ["SPLIT_1", "SPLIT_2", "SPLIT_3"] assert await client.get_treatment("key", "SPLIT_1", None) == 'off' - assert await client.get_treatment("key", "SPLIT_2", None) == 'off' + assert await client.get_treatment("key", "SPLIT_2", None) == 'on' #?? self._update_temp_file(splits_json['splitChange1_3']) await self._synchronize_now() @@ -2717,7 +2802,7 @@ async def test_localhost_json_e2e(self): await self._synchronize_now() assert sorted(await self.factory.manager().split_names()) == ["SPLIT_2", "SPLIT_3"] - assert await client.get_treatment("key", "SPLIT_2", None) == 'on' + assert await client.get_treatment("key", "SPLIT_2", None) == 'off' #?? 
# Tests 6 await self.factory._storages['splits'].update([], ['SPLIT_2'], -1) @@ -2742,6 +2827,12 @@ async def test_localhost_json_e2e(self): assert await client.get_treatment("key", "SPLIT_1", None) == 'control' assert await client.get_treatment("key", "SPLIT_2", None) == 'on' + # rule based segment test + self._update_temp_file(splits_json['splitChange7_1']) + await self._synchronize_now() + assert await client.get_treatment('bilal@split.io', 'rbs_feature_flag', {'email': 'bilal@split.io'}) == 'on' + assert await client.get_treatment('mauro@split.io', 'rbs_feature_flag', {'email': 'mauro@split.io'}) == 'off' + def _update_temp_file(self, json_body): f = open(os.path.join(os.path.dirname(__file__), 'files','split_changes_temp.json'), 'w') f.write(json.dumps(json_body)) @@ -2821,6 +2912,7 @@ async def _setup_method(self): self.pluggable_storage_adapter = StorageMockAdapterAsync() split_storage = PluggableSplitStorageAsync(self.pluggable_storage_adapter, 'myprefix') segment_storage = PluggableSegmentStorageAsync(self.pluggable_storage_adapter, 'myprefix') + rb_segment_storage = PluggableRuleBasedSegmentsStorageAsync(self.pluggable_storage_adapter, 'myprefix') telemetry_pluggable_storage = await PluggableTelemetryStorageAsync.create(self.pluggable_storage_adapter, metadata, 'myprefix') telemetry_producer = TelemetryStorageProducerAsync(telemetry_pluggable_storage) @@ -2830,6 +2922,7 @@ async def _setup_method(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': PluggableImpressionsStorageAsync(self.pluggable_storage_adapter, metadata), 'events': PluggableEventsStorageAsync(self.pluggable_storage_adapter, metadata), 'telemetry': telemetry_pluggable_storage @@ -2858,11 +2951,14 @@ async def _setup_method(self): split_fn = os.path.join(os.path.dirname(__file__), 'files', 'splitChanges.json') with open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for 
split in data['ff']['d']: await self.pluggable_storage_adapter.set(split_storage._prefix.format(feature_flag_name=split['name']), split) for flag_set in split.get('sets'): await self.pluggable_storage_adapter.push_items(split_storage._flag_set_prefix.format(flag_set=flag_set), split['name']) - await self.pluggable_storage_adapter.set(split_storage._feature_flag_till_prefix, data['till']) + await self.pluggable_storage_adapter.set(split_storage._feature_flag_till_prefix, data['ff']['t']) + + for rbs in data['rbs']['d']: + await self.pluggable_storage_adapter.set(rb_segment_storage._prefix.format(segment_name=rbs['name']), rbs) segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentEmployeesChanges.json') with open(segment_fn, 'r') as flo: @@ -3023,7 +3119,10 @@ async def _teardown_method(self): "SPLITIO.split.regex_test", "SPLITIO.segment.human_beigns.till", "SPLITIO.split.boolean_test", - "SPLITIO.split.dependency_test" + "SPLITIO.split.dependency_test", + "SPLITIO.split.rbs_feature_flag", + "SPLITIO.rbsegments.till", + "SPLITIO.rbsegments.sample_rule_based_segment" ] for key in keys_to_delete: @@ -3041,6 +3140,7 @@ async def _setup_method(self): self.pluggable_storage_adapter = StorageMockAdapterAsync() split_storage = PluggableSplitStorageAsync(self.pluggable_storage_adapter) segment_storage = PluggableSegmentStorageAsync(self.pluggable_storage_adapter) + rb_segment_storage = PluggableRuleBasedSegmentsStorageAsync(self.pluggable_storage_adapter, 'myprefix') telemetry_pluggable_storage = await PluggableTelemetryStorageAsync.create(self.pluggable_storage_adapter, metadata) telemetry_producer = TelemetryStorageProducerAsync(telemetry_pluggable_storage) @@ -3050,6 +3150,7 @@ async def _setup_method(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': PluggableImpressionsStorageAsync(self.pluggable_storage_adapter, metadata), 'events': 
PluggableEventsStorageAsync(self.pluggable_storage_adapter, metadata), 'telemetry': telemetry_pluggable_storage @@ -3080,11 +3181,14 @@ async def _setup_method(self): split_fn = os.path.join(os.path.dirname(__file__), 'files', 'splitChanges.json') with open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: await self.pluggable_storage_adapter.set(split_storage._prefix.format(feature_flag_name=split['name']), split) for flag_set in split.get('sets'): await self.pluggable_storage_adapter.push_items(split_storage._flag_set_prefix.format(flag_set=flag_set), split['name']) - await self.pluggable_storage_adapter.set(split_storage._feature_flag_till_prefix, data['till']) + await self.pluggable_storage_adapter.set(split_storage._feature_flag_till_prefix, data['ff']['t']) + + for rbs in data['rbs']['d']: + await self.pluggable_storage_adapter.set(rb_segment_storage._prefix.format(segment_name=rbs['name']), rbs) segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentEmployeesChanges.json') with open(segment_fn, 'r') as flo: @@ -3230,7 +3334,10 @@ async def _teardown_method(self): "SPLITIO.split.regex_test", "SPLITIO.segment.human_beigns.till", "SPLITIO.split.boolean_test", - "SPLITIO.split.dependency_test" + "SPLITIO.split.dependency_test", + "SPLITIO.split.rbs_feature_flag", + "SPLITIO.rbsegments.till", + "SPLITIO.rbsegments.sample_rule_based_segment" ] for key in keys_to_delete: @@ -3248,6 +3355,7 @@ async def _setup_method(self): self.pluggable_storage_adapter = StorageMockAdapterAsync() split_storage = PluggableSplitStorageAsync(self.pluggable_storage_adapter) segment_storage = PluggableSegmentStorageAsync(self.pluggable_storage_adapter) + rb_segment_storage = PluggableRuleBasedSegmentsStorageAsync(self.pluggable_storage_adapter, 'myprefix') telemetry_pluggable_storage = await PluggableTelemetryStorageAsync.create(self.pluggable_storage_adapter, metadata) telemetry_producer = 
TelemetryStorageProducerAsync(telemetry_pluggable_storage) @@ -3257,6 +3365,7 @@ async def _setup_method(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': PluggableImpressionsStorageAsync(self.pluggable_storage_adapter, metadata), 'events': PluggableEventsStorageAsync(self.pluggable_storage_adapter, metadata), 'telemetry': telemetry_pluggable_storage @@ -3302,11 +3411,14 @@ async def _setup_method(self): split_fn = os.path.join(os.path.dirname(__file__), 'files', 'splitChanges.json') with open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: await self.pluggable_storage_adapter.set(split_storage._prefix.format(feature_flag_name=split['name']), split) for flag_set in split.get('sets'): await self.pluggable_storage_adapter.push_items(split_storage._flag_set_prefix.format(flag_set=flag_set), split['name']) - await self.pluggable_storage_adapter.set(split_storage._feature_flag_till_prefix, data['till']) + await self.pluggable_storage_adapter.set(split_storage._feature_flag_till_prefix, data['ff']['t']) + + for rbs in data['rbs']['d']: + await self.pluggable_storage_adapter.set(rb_segment_storage._prefix.format(segment_name=rbs['name']), rbs) segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentEmployeesChanges.json') with open(segment_fn, 'r') as flo: @@ -3461,7 +3573,10 @@ async def _teardown_method(self): "SPLITIO.split.regex_test", "SPLITIO.segment.human_beigns.till", "SPLITIO.split.boolean_test", - "SPLITIO.split.dependency_test" + "SPLITIO.split.dependency_test", + "SPLITIO.split.rbs_feature_flag", + "SPLITIO.rbsegments.till", + "SPLITIO.rbsegments.sample_rule_based_segment" ] for key in keys_to_delete: @@ -3475,9 +3590,9 @@ async def test_optimized(self): split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() - await 
split_storage.update([splits.from_raw(splits_json['splitChange1_1']['splits'][0]), - splits.from_raw(splits_json['splitChange1_1']['splits'][1]), - splits.from_raw(splits_json['splitChange1_1']['splits'][2]) + await split_storage.update([splits.from_raw(splits_json['splitChange1_1']['ff']['d'][0]), + splits.from_raw(splits_json['splitChange1_1']['ff']['d'][1]), + splits.from_raw(splits_json['splitChange1_1']['ff']['d'][2]) ], [], -1) telemetry_storage = await InMemoryTelemetryStorageAsync.create() @@ -3488,6 +3603,7 @@ async def test_optimized(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': InMemoryRuleBasedSegmentStorageAsync(), 'impressions': InMemoryImpressionStorageAsync(5000, telemetry_runtime_producer), 'events': InMemoryEventStorageAsync(5000, telemetry_runtime_producer), } @@ -3534,9 +3650,9 @@ async def test_debug(self): split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() - await split_storage.update([splits.from_raw(splits_json['splitChange1_1']['splits'][0]), - splits.from_raw(splits_json['splitChange1_1']['splits'][1]), - splits.from_raw(splits_json['splitChange1_1']['splits'][2]) + await split_storage.update([splits.from_raw(splits_json['splitChange1_1']['ff']['d'][0]), + splits.from_raw(splits_json['splitChange1_1']['ff']['d'][1]), + splits.from_raw(splits_json['splitChange1_1']['ff']['d'][2]) ], [], -1) telemetry_storage = await InMemoryTelemetryStorageAsync.create() @@ -3547,6 +3663,7 @@ async def test_debug(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': InMemoryRuleBasedSegmentStorageAsync(), 'impressions': InMemoryImpressionStorageAsync(5000, telemetry_runtime_producer), 'events': InMemoryEventStorageAsync(5000, telemetry_runtime_producer), } @@ -3593,9 +3710,9 @@ async def test_none(self): split_storage = InMemorySplitStorageAsync() segment_storage = InMemorySegmentStorageAsync() - await 
split_storage.update([splits.from_raw(splits_json['splitChange1_1']['splits'][0]), - splits.from_raw(splits_json['splitChange1_1']['splits'][1]), - splits.from_raw(splits_json['splitChange1_1']['splits'][2]) + await split_storage.update([splits.from_raw(splits_json['splitChange1_1']['ff']['d'][0]), + splits.from_raw(splits_json['splitChange1_1']['ff']['d'][1]), + splits.from_raw(splits_json['splitChange1_1']['ff']['d'][2]) ], [], -1) telemetry_storage = await InMemoryTelemetryStorageAsync.create() @@ -3606,6 +3723,7 @@ async def test_none(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': InMemoryRuleBasedSegmentStorageAsync(), 'impressions': InMemoryImpressionStorageAsync(5000, telemetry_runtime_producer), 'events': InMemoryEventStorageAsync(5000, telemetry_runtime_producer), } @@ -3659,10 +3777,11 @@ async def test_optimized(self): redis_client = await build_async(DEFAULT_CONFIG.copy()) split_storage = RedisSplitStorageAsync(redis_client, True) segment_storage = RedisSegmentStorageAsync(redis_client) + rb_segment_storage = RedisRuleBasedSegmentsStorageAsync(redis_client) - await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][0]['name']), json.dumps(splits_json['splitChange1_1']['splits'][0])) - await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][1]['name']), json.dumps(splits_json['splitChange1_1']['splits'][1])) - await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][2]['name']), json.dumps(splits_json['splitChange1_1']['splits'][2])) + await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][0]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][0])) + await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][1]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][1])) + await 
redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][2]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][2])) await redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, -1) telemetry_redis_storage = await RedisTelemetryStorageAsync.create(redis_client, metadata) @@ -3673,6 +3792,7 @@ async def test_optimized(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': RedisImpressionsStorageAsync(redis_client, metadata), 'events': RedisEventsStorageAsync(redis_client, metadata), } @@ -3726,10 +3846,11 @@ async def test_debug(self): redis_client = await build_async(DEFAULT_CONFIG.copy()) split_storage = RedisSplitStorageAsync(redis_client, True) segment_storage = RedisSegmentStorageAsync(redis_client) + rb_segment_storage = RedisRuleBasedSegmentsStorageAsync(redis_client) - await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][0]['name']), json.dumps(splits_json['splitChange1_1']['splits'][0])) - await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][1]['name']), json.dumps(splits_json['splitChange1_1']['splits'][1])) - await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][2]['name']), json.dumps(splits_json['splitChange1_1']['splits'][2])) + await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][0]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][0])) + await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][1]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][1])) + await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][2]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][2])) await redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, -1) telemetry_redis_storage = await RedisTelemetryStorageAsync.create(redis_client, 
metadata) @@ -3740,6 +3861,7 @@ async def test_debug(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': RedisImpressionsStorageAsync(redis_client, metadata), 'events': RedisEventsStorageAsync(redis_client, metadata), } @@ -3793,10 +3915,11 @@ async def test_none(self): redis_client = await build_async(DEFAULT_CONFIG.copy()) split_storage = RedisSplitStorageAsync(redis_client, True) segment_storage = RedisSegmentStorageAsync(redis_client) + rb_segment_storage = RedisRuleBasedSegmentsStorageAsync(redis_client) - await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][0]['name']), json.dumps(splits_json['splitChange1_1']['splits'][0])) - await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][1]['name']), json.dumps(splits_json['splitChange1_1']['splits'][1])) - await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['splits'][2]['name']), json.dumps(splits_json['splitChange1_1']['splits'][2])) + await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][0]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][0])) + await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][1]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][1])) + await redis_client.set(split_storage._get_key(splits_json['splitChange1_1']['ff']['d'][2]['name']), json.dumps(splits_json['splitChange1_1']['ff']['d'][2])) await redis_client.set(split_storage._FEATURE_FLAG_TILL_KEY, -1) telemetry_redis_storage = await RedisTelemetryStorageAsync.create(redis_client, metadata) @@ -3807,6 +3930,7 @@ async def test_none(self): storages = { 'splits': split_storage, 'segments': segment_storage, + 'rule_based_segments': rb_segment_storage, 'impressions': RedisImpressionsStorageAsync(redis_client, metadata), 'events': RedisEventsStorageAsync(redis_client, metadata), } @@ 
-3981,6 +4105,16 @@ async def _get_treatment_async(factory): if not isinstance(factory._recorder._impressions_manager._strategy, StrategyNoneMode): await _validate_last_impressions_async(client, ('regex_test', 'abc4', 'on')) + # test rule based segment matcher + assert await client.get_treatment('bilal@split.io', 'rbs_feature_flag', {'email': 'bilal@split.io'}) == 'on' + if not isinstance(factory._recorder._impressions_manager._strategy, StrategyNoneMode): + await _validate_last_impressions_async(client, ('rbs_feature_flag', 'bilal@split.io', 'on')) + + # test rule based segment matcher + assert await client.get_treatment('mauro@split.io', 'rbs_feature_flag', {'email': 'mauro@split.io'}) == 'off' + if not isinstance(factory._recorder._impressions_manager._strategy, StrategyNoneMode): + await _validate_last_impressions_async(client, ('rbs_feature_flag', 'mauro@split.io', 'off')) + async def _get_treatment_with_config_async(factory): """Test client.get_treatment_with_config().""" try: @@ -4265,5 +4399,5 @@ async def _manager_methods_async(factory): assert result.change_number == 123 assert result.configs['on'] == '{"size":15,"test":20}' - assert len(await manager.split_names()) == 7 - assert len(await manager.splits()) == 7 + assert len(await manager.split_names()) == 8 + assert len(await manager.splits()) == 8 diff --git a/tests/integration/test_pluggable_integration.py b/tests/integration/test_pluggable_integration.py index 844cde14..20545da5 100644 --- a/tests/integration/test_pluggable_integration.py +++ b/tests/integration/test_pluggable_integration.py @@ -23,12 +23,12 @@ def test_put_fetch(self): split_fn = os.path.join(os.path.dirname(__file__), 'files', 'split_changes.json') with open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: adapter.set(storage._prefix.format(feature_flag_name=split['name']), split) 
adapter.increment(storage._traffic_type_prefix.format(traffic_type_name=split['trafficTypeName']), 1) - adapter.set(storage._feature_flag_till_prefix, data['till']) + adapter.set(storage._feature_flag_till_prefix, data['ff']['t']) - split_objects = [splits.from_raw(raw) for raw in data['splits']] + split_objects = [splits.from_raw(raw) for raw in data['ff']['d']] for split_object in split_objects: raw = split_object.to_json() @@ -53,8 +53,8 @@ def test_put_fetch(self): assert len(original_condition.matchers) == len(fetched_condition.matchers) assert len(original_condition.partitions) == len(fetched_condition.partitions) - adapter.set(storage._feature_flag_till_prefix, data['till']) - assert storage.get_change_number() == data['till'] + adapter.set(storage._feature_flag_till_prefix, data['ff']['t']) + assert storage.get_change_number() == data['ff']['t'] assert storage.is_valid_traffic_type('user') is True assert storage.is_valid_traffic_type('account') is True @@ -89,12 +89,12 @@ def test_get_all(self): split_fn = os.path.join(os.path.dirname(__file__), 'files', 'split_changes.json') with open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: adapter.set(storage._prefix.format(feature_flag_name=split['name']), split) adapter.increment(storage._traffic_type_prefix.format(traffic_type_name=split['trafficTypeName']), 1) - adapter.set(storage._feature_flag_till_prefix, data['till']) + adapter.set(storage._feature_flag_till_prefix, data['ff']['t']) - split_objects = [splits.from_raw(raw) for raw in data['splits']] + split_objects = [splits.from_raw(raw) for raw in data['ff']['d']] original_splits = {split.name: split for split in split_objects} fetched_names = storage.get_split_names() fetched_splits = {split.name: split for split in storage.get_all_splits()} @@ -260,12 +260,12 @@ async def test_put_fetch(self): split_fn = os.path.join(os.path.dirname(__file__), 'files', 'split_changes.json') with 
open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: await adapter.set(storage._prefix.format(feature_flag_name=split['name']), split) await adapter.increment(storage._traffic_type_prefix.format(traffic_type_name=split['trafficTypeName']), 1) - await adapter.set(storage._feature_flag_till_prefix, data['till']) + await adapter.set(storage._feature_flag_till_prefix, data['ff']['t']) - split_objects = [splits.from_raw(raw) for raw in data['splits']] + split_objects = [splits.from_raw(raw) for raw in data['ff']['d']] for split_object in split_objects: raw = split_object.to_json() @@ -290,8 +290,8 @@ async def test_put_fetch(self): assert len(original_condition.matchers) == len(fetched_condition.matchers) assert len(original_condition.partitions) == len(fetched_condition.partitions) - await adapter.set(storage._feature_flag_till_prefix, data['till']) - assert await storage.get_change_number() == data['till'] + await adapter.set(storage._feature_flag_till_prefix, data['ff']['t']) + assert await storage.get_change_number() == data['ff']['t'] assert await storage.is_valid_traffic_type('user') is True assert await storage.is_valid_traffic_type('account') is True @@ -327,12 +327,12 @@ async def test_get_all(self): split_fn = os.path.join(os.path.dirname(__file__), 'files', 'split_changes.json') with open(split_fn, 'r') as flo: data = json.loads(flo.read()) - for split in data['splits']: + for split in data['ff']['d']: await adapter.set(storage._prefix.format(feature_flag_name=split['name']), split) await adapter.increment(storage._traffic_type_prefix.format(traffic_type_name=split['trafficTypeName']), 1) - await adapter.set(storage._feature_flag_till_prefix, data['till']) + await adapter.set(storage._feature_flag_till_prefix, data['ff']['t']) - split_objects = [splits.from_raw(raw) for raw in data['splits']] + split_objects = [splits.from_raw(raw) for raw in data['ff']['d']] original_splits = {split.name: 
split for split in split_objects} fetched_names = await storage.get_split_names() fetched_splits = {split.name: split for split in await storage.get_all_splits()} diff --git a/tests/integration/test_redis_integration.py b/tests/integration/test_redis_integration.py index e53ab4e2..4b70898b 100644 --- a/tests/integration/test_redis_integration.py +++ b/tests/integration/test_redis_integration.py @@ -28,7 +28,7 @@ def test_put_fetch(self): with open(os.path.join(os.path.dirname(__file__), 'files', 'split_changes.json'), 'r') as flo: split_changes = json.load(flo) - split_objects = [splits.from_raw(raw) for raw in split_changes['splits']] + split_objects = [splits.from_raw(raw) for raw in split_changes['ff']['d']] for split_object in split_objects: raw = split_object.to_json() adapter.set(RedisSplitStorage._FEATURE_FLAG_KEY.format(feature_flag_name=split_object.name), json.dumps(raw)) @@ -55,8 +55,8 @@ def test_put_fetch(self): assert len(original_condition.matchers) == len(fetched_condition.matchers) assert len(original_condition.partitions) == len(fetched_condition.partitions) - adapter.set(RedisSplitStorage._FEATURE_FLAG_TILL_KEY, split_changes['till']) - assert storage.get_change_number() == split_changes['till'] + adapter.set(RedisSplitStorage._FEATURE_FLAG_TILL_KEY, split_changes['ff']['t']) + assert storage.get_change_number() == split_changes['ff']['t'] assert storage.is_valid_traffic_type('user') is True assert storage.is_valid_traffic_type('account') is True @@ -93,7 +93,7 @@ def test_get_all(self): with open(os.path.join(os.path.dirname(__file__), 'files', 'split_changes.json'), 'r') as flo: split_changes = json.load(flo) - split_objects = [splits.from_raw(raw) for raw in split_changes['splits']] + split_objects = [splits.from_raw(raw) for raw in split_changes['ff']['d']] for split_object in split_objects: raw = split_object.to_json() adapter.set(RedisSplitStorage._FEATURE_FLAG_KEY.format(feature_flag_name=split_object.name), json.dumps(raw)) @@ -262,7 
+262,7 @@ async def test_put_fetch(self): with open(os.path.join(os.path.dirname(__file__), 'files', 'split_changes.json'), 'r') as flo: split_changes = json.load(flo) - split_objects = [splits.from_raw(raw) for raw in split_changes['splits']] + split_objects = [splits.from_raw(raw) for raw in split_changes['ff']['d']] for split_object in split_objects: raw = split_object.to_json() await adapter.set(RedisSplitStorage._FEATURE_FLAG_KEY.format(feature_flag_name=split_object.name), json.dumps(raw)) @@ -289,8 +289,8 @@ async def test_put_fetch(self): assert len(original_condition.matchers) == len(fetched_condition.matchers) assert len(original_condition.partitions) == len(fetched_condition.partitions) - await adapter.set(RedisSplitStorageAsync._FEATURE_FLAG_TILL_KEY, split_changes['till']) - assert await storage.get_change_number() == split_changes['till'] + await adapter.set(RedisSplitStorageAsync._FEATURE_FLAG_TILL_KEY, split_changes['ff']['t']) + assert await storage.get_change_number() == split_changes['ff']['t'] assert await storage.is_valid_traffic_type('user') is True assert await storage.is_valid_traffic_type('account') is True @@ -326,7 +326,7 @@ async def test_get_all(self): with open(os.path.join(os.path.dirname(__file__), 'files', 'split_changes.json'), 'r') as flo: split_changes = json.load(flo) - split_objects = [splits.from_raw(raw) for raw in split_changes['splits']] + split_objects = [splits.from_raw(raw) for raw in split_changes['ff']['d']] for split_object in split_objects: raw = split_object.to_json() await adapter.set(RedisSplitStorageAsync._FEATURE_FLAG_KEY.format(feature_flag_name=split_object.name), json.dumps(raw)) diff --git a/tests/integration/test_streaming_e2e.py b/tests/integration/test_streaming_e2e.py index a87ef59d..764475de 100644 --- a/tests/integration/test_streaming_e2e.py +++ b/tests/integration/test_streaming_e2e.py @@ -34,15 +34,17 @@ def test_happiness(self): } split_changes = { - -1: { - 'since': -1, - 'till': 1, - 'splits': 
[make_simple_split('split1', 1, True, False, 'on', 'user', True)] + -1: {'ff': { + 's': -1, + 't': 1, + 'd': [make_simple_split('split1', 1, True, False, 'on', 'user', True)]}, + 'rbs': {'s': -1, 't': -1, 'd': []} }, - 1: { - 'since': 1, - 'till': 1, - 'splits': [] + 1: {'ff': { + 's': 1, + 't': 1, + 'd': []}, + 'rbs': {'s': -1, 't': -1, 'd': []} } } @@ -76,22 +78,26 @@ def test_happiness(self): assert(factory._telemetry_evaluation_producer._telemetry_storage._streaming_events._streaming_events[len(factory._telemetry_evaluation_producer._telemetry_storage._streaming_events._streaming_events)-1]._type == StreamingEventTypes.SYNC_MODE_UPDATE.value) assert(factory._telemetry_evaluation_producer._telemetry_storage._streaming_events._streaming_events[len(factory._telemetry_evaluation_producer._telemetry_storage._streaming_events._streaming_events)-1]._data == SSESyncMode.STREAMING.value) split_changes[1] = { - 'since': 1, - 'till': 2, - 'splits': [make_simple_split('split1', 2, True, False, 'off', 'user', False)] + 'ff': { + 's': 1, + 't': 2, + 'd': [make_simple_split('split1', 2, True, False, 'off', 'user', False)]}, + 'rbs': {'s': -1, 't': -1, 'd': []} } - split_changes[2] = {'since': 2, 'till': 2, 'splits': []} + split_changes[2] = {'ff': {'s': 2, 't': 2, 'd': []}, 'rbs': {'s': -1, 't': -1, 'd': []}} sse_server.publish(make_split_change_event(2)) time.sleep(1) assert factory.client().get_treatment('maldo', 'split1') == 'off' split_changes[2] = { - 'since': 2, - 'till': 3, - 'splits': [make_split_with_segment('split2', 2, True, False, - 'off', 'user', 'off', 'segment1')] - } - split_changes[3] = {'since': 3, 'till': 3, 'splits': []} + 'ff': { + 's': 2, + 't': 3, + 'd': [make_split_with_segment('split2', 2, True, False, + 'off', 'user', 'off', 'segment1')]}, + 'rbs': {'s': -1, 't': -1, 'd': []} + } + split_changes[3] = {'ff': {'s': 3, 't': 3, 'd': []}, 'rbs': {'s': -1, 't': -1, 'd': []}} segment_changes[('segment1', -1)] = { 'name': 'segment1', 'added': ['maldo'], @@ 
-141,49 +147,49 @@ def test_happiness(self): # Initial splits fetch req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=-1' + assert req.path == '/api/splitChanges?s=1.3&since=-1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Auth req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/v2/auth?s=1.1' + assert req.path == '/api/v2/auth?s=1.3' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after streaming connected req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Fetch after first notification req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Fetch after second notification req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 
'GET' - assert req.path == '/api/splitChanges?s=1.1&since=3' + assert req.path == '/api/splitChanges?s=1.3&since=3&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Segment change notification @@ -222,12 +228,14 @@ def test_occupancy_flicker(self): } split_changes = { - -1: { - 'since': -1, - 'till': 1, - 'splits': [make_simple_split('split1', 1, True, False, 'off', 'user', True)] + -1: {'ff': { + 's': -1, + 't': 1, + 'd': [make_simple_split('split1', 1, True, False, 'off', 'user', True)]}, + 'rbs': {'s': -1, 't': -1, 'd': []} }, - 1: {'since': 1, 'till': 1, 'splits': []} + 1: {'ff': {'s': 1, 't': 1, 'd': []}, + 'rbs': {'s': -1, 't': -1, 'd': []}} } segment_changes = {} @@ -266,11 +274,12 @@ def test_occupancy_flicker(self): # After dropping occupancy, the sdk should switch to polling # and perform a syncAll that gets this change split_changes[1] = { - 'since': 1, - 'till': 2, - 'splits': [make_simple_split('split1', 2, True, False, 'off', 'user', False)] + 'ff': {'s': 1, + 't': 2, + 'd': [make_simple_split('split1', 2, True, False, 'off', 'user', False)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[2] = {'since': 2, 'till': 2, 'splits': []} + split_changes[2] = {'ff': {'s': 2, 't': 2, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_occupancy('control_pri', 0)) sse_server.publish(make_occupancy('control_sec', 0)) @@ -282,11 +291,12 @@ def test_occupancy_flicker(self): # We restore occupancy, and it should be fetched by the # sync all after streaming is restored. 
split_changes[2] = { - 'since': 2, - 'till': 3, - 'splits': [make_simple_split('split1', 3, True, False, 'off', 'user', True)] + 'ff': {'s': 2, + 't': 3, + 'd': [make_simple_split('split1', 3, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[3] = {'since': 3, 'till': 3, 'splits': []} + split_changes[3] = {'ff': {'s': 3, 't': 3, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_occupancy('control_pri', 1)) time.sleep(2) @@ -295,22 +305,24 @@ def test_occupancy_flicker(self): # Now we make another change and send an event so it's propagated split_changes[3] = { - 'since': 3, - 'till': 4, - 'splits': [make_simple_split('split1', 4, True, False, 'off', 'user', False)] + 'ff': {'s': 3, + 't': 4, + 'd': [make_simple_split('split1', 4, True, False, 'off', 'user', False)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[4] = {'since': 4, 'till': 4, 'splits': []} + split_changes[4] = {'ff': {'s': 4, 't': 4, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_split_change_event(4)) time.sleep(2) assert factory.client().get_treatment('maldo', 'split1') == 'off' # Kill the split split_changes[4] = { - 'since': 4, - 'till': 5, - 'splits': [make_simple_split('split1', 5, True, True, 'frula', 'user', False)] + 'ff': {'s': 4, + 't': 5, + 'd': [make_simple_split('split1', 5, True, True, 'frula', 'user', False)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[5] = {'since': 5, 'till': 5, 'splits': []} + split_changes[5] = {'ff': {'s': 5, 't': 5, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_split_kill_event('split1', 'frula', 5)) time.sleep(2) assert factory.client().get_treatment('maldo', 'split1') == 'frula' @@ -342,73 +354,73 @@ def test_occupancy_flicker(self): # Initial splits fetch req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=-1' + assert req.path == 
'/api/splitChanges?s=1.3&since=-1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Auth req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/v2/auth?s=1.1' + assert req.path == '/api/v2/auth?s=1.3' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after streaming connected req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Fetch after first notification req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Fetch after second notification req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=3' + assert req.path == '/api/splitChanges?s=1.3&since=3&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since 
== till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=3' + assert req.path == '/api/splitChanges?s=1.3&since=3&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=4' + assert req.path == '/api/splitChanges?s=1.3&since=4&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Split kill req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=4' + assert req.path == '/api/splitChanges?s=1.3&since=4&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=5' + assert req.path == '/api/splitChanges?s=1.3&since=5&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Cleanup @@ -435,12 +447,14 @@ def test_start_without_occupancy(self): } split_changes = { - -1: { - 'since': -1, - 'till': 1, - 'splits': [make_simple_split('split1', 1, True, False, 'off', 'user', True)] + -1: {'ff': { + 's': -1, + 't': 1, + 'd': [make_simple_split('split1', 1, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} }, - 1: {'since': 1, 'till': 1, 'splits': []} + 1: {'ff': {'s': 1, 't': 1, 'd': []}, + 'rbs': {'t': -1, 's': -1, 'd': []}} } segment_changes = {} @@ -478,11 +492,13 @@ def test_start_without_occupancy(self): # After restoring occupancy, the sdk should switch to polling # and perform a syncAll that gets this change split_changes[1] = { - 'since': 1, - 'till': 2, - 'splits': [make_simple_split('split1', 2, True, False, 'off', 'user', False)] + 'ff': {'s': 1, + 't': 2, + 'd': [make_simple_split('split1', 2, True, False, 'off', 'user', False)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } 
- split_changes[2] = {'since': 2, 'till': 2, 'splits': []} + split_changes[2] = {'ff': {'s': 2, 't': 2, 'd': []}, + 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_occupancy('control_sec', 1)) time.sleep(2) @@ -516,43 +532,43 @@ def test_start_without_occupancy(self): # Initial splits fetch req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=-1' + assert req.path == '/api/splitChanges?s=1.3&since=-1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Auth req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/v2/auth?s=1.1' + assert req.path == '/api/v2/auth?s=1.3' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after streaming connected req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after push down req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after push restored req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Second iteration of previous syncAll req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == 
'/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Cleanup @@ -562,7 +578,7 @@ def test_start_without_occupancy(self): sse_server.publish(sse_server.GRACEFUL_REQUEST_END) sse_server.stop() split_backend.stop() - + def test_streaming_status_changes(self): """Test changes between streaming enabled, paused and disabled.""" auth_server_response = { @@ -579,12 +595,14 @@ def test_streaming_status_changes(self): } split_changes = { - -1: { - 'since': -1, - 'till': 1, - 'splits': [make_simple_split('split1', 1, True, False, 'off', 'user', True)] + -1: {'ff': { + 's': -1, + 't': 1, + 'd': [make_simple_split('split1', 1, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} }, - 1: {'since': 1, 'till': 1, 'splits': []} + 1: {'ff': {'s': 1, 't': 1, 'd': []}, + 'rbs': {'t': -1, 's': -1, 'd': []}} } segment_changes = {} @@ -623,11 +641,12 @@ def test_streaming_status_changes(self): # After dropping occupancy, the sdk should switch to polling # and perform a syncAll that gets this change split_changes[1] = { - 'since': 1, - 'till': 2, - 'splits': [make_simple_split('split1', 2, True, False, 'off', 'user', False)] + 'ff': {'s': 1, + 't': 2, + 'd': [make_simple_split('split1', 2, True, False, 'off', 'user', False)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[2] = {'since': 2, 'till': 2, 'splits': []} + split_changes[2] = {'ff': {'s': 2, 't': 2, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_control_event('STREAMING_PAUSED', 1)) time.sleep(2) @@ -638,11 +657,12 @@ def test_streaming_status_changes(self): # We restore occupancy, and it should be fetched by the # sync all after streaming is restored. 
split_changes[2] = { - 'since': 2, - 'till': 3, - 'splits': [make_simple_split('split1', 3, True, False, 'off', 'user', True)] + 'ff': {'s': 2, + 't': 3, + 'd': [make_simple_split('split1', 3, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[3] = {'since': 3, 'till': 3, 'splits': []} + split_changes[3] = {'ff': {'s': 3, 't': 3, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_control_event('STREAMING_ENABLED', 2)) time.sleep(2) @@ -651,22 +671,26 @@ def test_streaming_status_changes(self): # Now we make another change and send an event so it's propagated split_changes[3] = { - 'since': 3, - 'till': 4, - 'splits': [make_simple_split('split1', 4, True, False, 'off', 'user', False)] + 'ff': {'s': 3, + 't': 4, + 'd': [make_simple_split('split1', 4, True, False, 'off', 'user', False)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[4] = {'since': 4, 'till': 4, 'splits': []} + split_changes[4] = {'ff': {'s': 4, 't': 4, 'd': []}, + 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_split_change_event(4)) time.sleep(2) assert factory.client().get_treatment('maldo', 'split1') == 'off' assert not task.running() split_changes[4] = { - 'since': 4, - 'till': 5, - 'splits': [make_simple_split('split1', 5, True, False, 'off', 'user', True)] + 'ff': {'s': 4, + 't': 5, + 'd': [make_simple_split('split1', 5, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[5] = {'since': 5, 'till': 5, 'splits': []} + split_changes[5] = {'ff': {'s': 5, 't': 5, 'd': []}, + 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_control_event('STREAMING_DISABLED', 2)) time.sleep(2) assert factory.client().get_treatment('maldo', 'split1') == 'on' @@ -700,73 +724,73 @@ def test_streaming_status_changes(self): # Initial splits fetch req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=-1' + assert req.path == 
'/api/splitChanges?s=1.3&since=-1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Auth req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/v2/auth?s=1.1' + assert req.path == '/api/v2/auth?s=1.3' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after streaming connected req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll on push down req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after push is up req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=3' + assert req.path == '/api/splitChanges?s=1.3&since=3&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Fetch after notification req = 
split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=3' + assert req.path == '/api/splitChanges?s=1.3&since=3&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=4' + assert req.path == '/api/splitChanges?s=1.3&since=4&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after streaming disabled req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=4' + assert req.path == '/api/splitChanges?s=1.3&since=4&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=5' + assert req.path == '/api/splitChanges?s=1.3&since=5&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Cleanup @@ -793,15 +817,17 @@ def test_server_closes_connection(self): } split_changes = { - -1: { - 'since': -1, - 'till': 1, - 'splits': [make_simple_split('split1', 1, True, False, 'on', 'user', True)] + -1: {'ff': { + 's': -1, + 't': 1, + 'd': [make_simple_split('split1', 1, True, False, 'on', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} }, - 1: { - 'since': 1, - 'till': 1, - 'splits': [] + 1: {'ff': { + 's': 1, + 't': 1, + 'd': []}, + 'rbs': {'t': -1, 's': -1, 'd': []} } } @@ -836,12 +862,14 @@ def test_server_closes_connection(self): assert not task.running() time.sleep(1) - split_changes[1] = { - 'since': 1, - 'till': 2, - 'splits': [make_simple_split('split1', 2, True, False, 'off', 'user', False)] - } - split_changes[2] = {'since': 2, 'till': 2, 'splits': []} + split_changes[1] = {'ff': { + 's': 1, + 't': 2, + 'd': [make_simple_split('split1', 2, True, False, 'off', 'user', False)]}, + 'rbs': {'t': 
 -1, 's': -1, 'd': []} + } + split_changes[2] = {'ff': {'s': 2, 't': 2, 'd': []}, + 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_split_change_event(2)) time.sleep(1) assert factory.client().get_treatment('maldo', 'split1') == 'off' @@ -860,12 +888,14 @@ time.sleep(2) assert not task.running() - split_changes[2] = { - 'since': 2, - 'till': 3, - 'splits': [make_simple_split('split1', 3, True, False, 'off', 'user', True)] - } - split_changes[3] = {'since': 3, 'till': 3, 'splits': []} + split_changes[2] = {'ff': { + 's': 2, + 't': 3, + 'd': [make_simple_split('split1', 3, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} + } + split_changes[3] = {'ff': {'s': 3, 't': 3, 'd': []}, + 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_split_change_event(3)) time.sleep(1) assert factory.client().get_treatment('maldo', 'split1') == 'on' @@ -921,67 +951,67 @@ # Initial splits fetch req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=-1' + assert req.path == '/api/splitChanges?s=1.3&since=-1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Auth req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/v2/auth?s=1.1' + assert req.path == '/api/v2/auth?s=1.3' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after streaming connected req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert
req.headers['authorization'] == 'Bearer some_apikey' # Fetch after first notification req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll on retryable error handling req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Auth after connection breaks req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/v2/auth?s=1.1' + assert req.path == '/api/v2/auth?s=1.3' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after streaming connected again req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Fetch after new notification req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=3' + assert req.path == '/api/splitChanges?s=1.3&since=3&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Cleanup @@ -1015,12 +1045,14 @@ def 
test_ably_errors_handling(self): } split_changes = { - -1: { - 'since': -1, - 'till': 1, - 'splits': [make_simple_split('split1', 1, True, False, 'off', 'user', True)] + -1: {'ff': { + 's': -1, + 't': 1, + 'd': [make_simple_split('split1', 1, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} }, - 1: {'since': 1, 'till': 1, 'splits': []} + 1: {'ff': {'s': 1, 't': 1, 'd': []}, + 'rbs': {'t': -1, 's': -1, 'd': []}} } segment_changes = {} @@ -1057,12 +1089,14 @@ def test_ably_errors_handling(self): # Make a change in the BE but don't send the event. # We'll send an ignorable error and check it has nothing happened - split_changes[1] = { - 'since': 1, - 'till': 2, - 'splits': [make_simple_split('split1', 2, True, False, 'off', 'user', False)] + split_changes[1] = {'ff': { + 's': 1, + 't': 2, + 'd': [make_simple_split('split1', 2, True, False, 'off', 'user', False)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[2] = {'since': 2, 'till': 2, 'splits': []} + split_changes[2] = {'ff': {'s': 2, 't': 2, 'd': []}, + 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_ably_error_event(60000, 600)) time.sleep(1) @@ -1083,12 +1117,14 @@ def test_ably_errors_handling(self): assert not task.running() # Assert streaming is working properly - split_changes[2] = { - 'since': 2, - 'till': 3, - 'splits': [make_simple_split('split1', 3, True, False, 'off', 'user', True)] - } - split_changes[3] = {'since': 3, 'till': 3, 'splits': []} + split_changes[2] = {'ff': { + 's': 2, + 't': 3, + 'd': [make_simple_split('split1', 3, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} + } + split_changes[3] = {'ff': {'s': 3, 't': 3, 'd': []}, + 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_split_change_event(3)) time.sleep(2) assert factory.client().get_treatment('maldo', 'split1') == 'on' @@ -1152,67 +1188,67 @@ def test_ably_errors_handling(self): # Initial splits fetch req = split_backend_requests.get() assert req.method 
== 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=-1' + assert req.path == '/api/splitChanges?s=1.3&since=-1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Auth req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/v2/auth?s=1.1' + assert req.path == '/api/v2/auth?s=1.3' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after streaming connected req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll retriable error req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Auth again req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/v2/auth?s=1.1' + assert req.path == '/api/v2/auth?s=1.3' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after push is up req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Fetch 
after notification req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=3' + assert req.path == '/api/splitChanges?s=1.3&since=3&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after non recoverable ably error req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=3' + assert req.path == '/api/splitChanges?s=1.3&since=3&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Cleanup @@ -1239,12 +1275,14 @@ def test_change_number(mocker): } split_changes = { - -1: { - 'since': -1, - 'till': 1, - 'splits': [make_simple_split('split1', 1, True, False, 'off', 'user', True)] + -1: {'ff': { + 's': -1, + 't': 1, + 'd': [make_simple_split('split1', 1, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} }, - 1: {'since': 1, 'till': 1, 'splits': []} + 1: {'ff': {'s': 1, 't': 1, 'd': []}, + 'rbs': {'t': -1, 's': -1, 'd': []}} } segment_changes = {} @@ -1312,15 +1350,17 @@ async def test_happiness(self): } split_changes = { - -1: { - 'since': -1, - 'till': 1, - 'splits': [make_simple_split('split1', 1, True, False, 'on', 'user', True)] + -1: {'ff': { + 's': -1, + 't': 1, + 'd': [make_simple_split('split1', 1, True, False, 'on', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} }, - 1: { - 'since': 1, - 'till': 1, - 'splits': [] + 1: {'ff': { + 's': 1, + 't': 1, + 'd': []}, + 'rbs': {'t': -1, 's': -1, 'd': []} } } @@ -1353,23 +1393,27 @@ async def test_happiness(self): await asyncio.sleep(1) 
assert(factory._telemetry_evaluation_producer._telemetry_storage._streaming_events._streaming_events[len(factory._telemetry_evaluation_producer._telemetry_storage._streaming_events._streaming_events)-1]._type == StreamingEventTypes.SYNC_MODE_UPDATE.value) assert(factory._telemetry_evaluation_producer._telemetry_storage._streaming_events._streaming_events[len(factory._telemetry_evaluation_producer._telemetry_storage._streaming_events._streaming_events)-1]._data == SSESyncMode.STREAMING.value) - split_changes[1] = { - 'since': 1, - 'till': 2, - 'splits': [make_simple_split('split1', 2, True, False, 'off', 'user', False)] - } - split_changes[2] = {'since': 2, 'till': 2, 'splits': []} + split_changes[1] = {'ff': { + 's': 1, + 't': 2, + 'd': [make_simple_split('split1', 2, True, False, 'off', 'user', False)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} + } + split_changes[2] = {'ff': {'s': 2, 't': 2, 'd': []}, + 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_split_change_event(2)) await asyncio.sleep(1) assert await factory.client().get_treatment('maldo', 'split1') == 'off' - split_changes[2] = { - 'since': 2, - 'till': 3, - 'splits': [make_split_with_segment('split2', 2, True, False, - 'off', 'user', 'off', 'segment1')] + split_changes[2] = {'ff': { + 's': 2, + 't': 3, + 'd': [make_split_with_segment('split2', 2, True, False, + 'off', 'user', 'off', 'segment1')]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[3] = {'since': 3, 'till': 3, 'splits': []} + split_changes[3] = {'ff': {'s': 3, 't': 3, 'd': []}, + 'rbs': {'t': -1, 's': -1, 'd': []}} segment_changes[('segment1', -1)] = { 'name': 'segment1', 'added': ['maldo'], @@ -1415,49 +1459,49 @@ async def test_happiness(self): # Initial splits fetch req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=-1' + assert req.path == '/api/splitChanges?s=1.3&since=-1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration 
until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Auth req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/v2/auth?s=1.1' + assert req.path == '/api/v2/auth?s=1.3' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after streaming connected req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Fetch after first notification req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Fetch after second notification req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=3' + assert req.path == '/api/splitChanges?s=1.3&since=3&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Segment change notification @@ -1495,12 +1539,14 @@ async def test_occupancy_flicker(self): } split_changes = { - -1: { - 'since': -1, - 
'till': 1, - 'splits': [make_simple_split('split1', 1, True, False, 'off', 'user', True)] + -1: {'ff': { + 's': -1, + 't': 1, + 'd': [make_simple_split('split1', 1, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} }, - 1: {'since': 1, 'till': 1, 'splits': []} + 1: {'ff': {'s': 1, 't': 1, 'd': []}, + 'rbs': {'t': -1, 's': -1, 'd': []}} } segment_changes = {} @@ -1538,13 +1584,13 @@ async def test_occupancy_flicker(self): # Make a change in the BE but don't send the event. # After dropping occupancy, the sdk should switch to polling # and perform a syncAll that gets this change - split_changes[1] = { - 'since': 1, - 'till': 2, - 'splits': [make_simple_split('split1', 2, True, False, 'off', 'user', False)] + split_changes[1] = {'ff': { + 's': 1, + 't': 2, + 'd': [make_simple_split('split1', 2, True, False, 'off', 'user', False)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[2] = {'since': 2, 'till': 2, 'splits': []} - + split_changes[2] = {'ff': {'s': 2, 't': 2, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_occupancy('control_pri', 0)) sse_server.publish(make_occupancy('control_sec', 0)) await asyncio.sleep(2) @@ -1554,36 +1600,38 @@ async def test_occupancy_flicker(self): # We make another chagne in the BE and don't send the event. # We restore occupancy, and it should be fetched by the # sync all after streaming is restored. 
- split_changes[2] = { - 'since': 2, - 'till': 3, - 'splits': [make_simple_split('split1', 3, True, False, 'off', 'user', True)] + split_changes[2] = {'ff': { + 's': 2, + 't': 3, + 'd': [make_simple_split('split1', 3, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[3] = {'since': 3, 'till': 3, 'splits': []} - + split_changes[3] = {'ff': {'s': 3, 't': 3, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_occupancy('control_pri', 1)) await asyncio.sleep(2) assert await factory.client().get_treatment('maldo', 'split1') == 'on' assert not task.running() # Now we make another change and send an event so it's propagated - split_changes[3] = { - 'since': 3, - 'till': 4, - 'splits': [make_simple_split('split1', 4, True, False, 'off', 'user', False)] + split_changes[3] = {'ff': { + 's': 3, + 't': 4, + 'd': [make_simple_split('split1', 4, True, False, 'off', 'user', False)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[4] = {'since': 4, 'till': 4, 'splits': []} + split_changes[4] = {'ff': {'s': 4, 't': 4, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_split_change_event(4)) await asyncio.sleep(2) assert await factory.client().get_treatment('maldo', 'split1') == 'off' # Kill the split - split_changes[4] = { - 'since': 4, - 'till': 5, - 'splits': [make_simple_split('split1', 5, True, True, 'frula', 'user', False)] + split_changes[4] = {'ff': { + 's': 4, + 't': 5, + 'd': [make_simple_split('split1', 5, True, True, 'frula', 'user', False)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[5] = {'since': 5, 'till': 5, 'splits': []} + split_changes[5] = {'ff': {'s': 5, 't': 5, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_split_kill_event('split1', 'frula', 5)) await asyncio.sleep(2) assert await factory.client().get_treatment('maldo', 'split1') == 'frula' @@ -1615,73 +1663,73 @@ async def test_occupancy_flicker(self): # Initial splits fetch req = 
split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=-1' + assert req.path == '/api/splitChanges?s=1.3&since=-1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Auth req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/v2/auth?s=1.1' + assert req.path == '/api/v2/auth?s=1.3' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after streaming connected req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Fetch after first notification req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Fetch after second notification req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=3' + assert 
req.path == '/api/splitChanges?s=1.3&since=3&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=3' + assert req.path == '/api/splitChanges?s=1.3&since=3&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=4' + assert req.path == '/api/splitChanges?s=1.3&since=4&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Split kill req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=4' + assert req.path == '/api/splitChanges?s=1.3&since=4&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=5' + assert req.path == '/api/splitChanges?s=1.3&since=5&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Cleanup @@ -1707,12 +1755,13 @@ async def test_start_without_occupancy(self): } split_changes = { - -1: { - 'since': -1, - 'till': 1, - 'splits': [make_simple_split('split1', 1, True, False, 'off', 'user', True)] + -1: {'ff': { + 's': -1, + 't': 1, + 'd': [make_simple_split('split1', 1, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} }, - 1: {'since': 1, 'till': 1, 'splits': []} + 1: {'ff': {'s': 1, 't': 1, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} } segment_changes = {} @@ -1752,12 +1801,13 @@ async def test_start_without_occupancy(self): # Make a change in the BE but don't send the event. 
# After restoring occupancy, the sdk should switch to polling # and perform a syncAll that gets this change - split_changes[1] = { - 'since': 1, - 'till': 2, - 'splits': [make_simple_split('split1', 2, True, False, 'off', 'user', False)] + split_changes[1] = {'ff': { + 's': 1, + 't': 2, + 'd': [make_simple_split('split1', 2, True, False, 'off', 'user', False)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[2] = {'since': 2, 'till': 2, 'splits': []} + split_changes[2] = {'ff': {'s': 2, 't': 2, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_occupancy('control_sec', 1)) await asyncio.sleep(2) @@ -1791,43 +1841,43 @@ async def test_start_without_occupancy(self): # Initial splits fetch req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=-1' + assert req.path == '/api/splitChanges?s=1.3&since=-1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Auth req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/v2/auth?s=1.1' + assert req.path == '/api/v2/auth?s=1.3' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after streaming connected req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after push down req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer 
some_apikey' # SyncAll after push restored req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Second iteration of previous syncAll req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Cleanup @@ -1853,12 +1903,13 @@ async def test_streaming_status_changes(self): } split_changes = { - -1: { - 'since': -1, - 'till': 1, - 'splits': [make_simple_split('split1', 1, True, False, 'off', 'user', True)] + -1: {'ff': { + 's': -1, + 't': 1, + 'd': [make_simple_split('split1', 1, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} }, - 1: {'since': 1, 'till': 1, 'splits': []} + 1: {'ff': {'s': 1, 't': 1, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} } segment_changes = {} @@ -1899,12 +1950,13 @@ async def test_streaming_status_changes(self): # Make a change in the BE but don't send the event. # After dropping occupancy, the sdk should switch to polling # and perform a syncAll that gets this change - split_changes[1] = { - 'since': 1, - 'till': 2, - 'splits': [make_simple_split('split1', 2, True, False, 'off', 'user', False)] + split_changes[1] = {'ff': { + 's': 1, + 't': 2, + 'd': [make_simple_split('split1', 2, True, False, 'off', 'user', False)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[2] = {'since': 2, 'till': 2, 'splits': []} + split_changes[2] = {'ff': {'s': 2, 't': 2, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_control_event('STREAMING_PAUSED', 1)) await asyncio.sleep(4) @@ -1915,12 +1967,13 @@ async def test_streaming_status_changes(self): # We make another chagne in the BE and don't send the event. 
# We restore occupancy, and it should be fetched by the # sync all after streaming is restored. - split_changes[2] = { - 'since': 2, - 'till': 3, - 'splits': [make_simple_split('split1', 3, True, False, 'off', 'user', True)] + split_changes[2] = {'ff': { + 's': 2, + 't': 3, + 'd': [make_simple_split('split1', 3, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[3] = {'since': 3, 'till': 3, 'splits': []} + split_changes[3] = {'ff': {'s': 3, 't': 3, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_control_event('STREAMING_ENABLED', 2)) await asyncio.sleep(2) @@ -1929,24 +1982,26 @@ async def test_streaming_status_changes(self): assert not task.running() # Now we make another change and send an event so it's propagated - split_changes[3] = { - 'since': 3, - 'till': 4, - 'splits': [make_simple_split('split1', 4, True, False, 'off', 'user', False)] + split_changes[3] = {'ff': { + 's': 3, + 't': 4, + 'd': [make_simple_split('split1', 4, True, False, 'off', 'user', False)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[4] = {'since': 4, 'till': 4, 'splits': []} + split_changes[4] = {'ff': {'s': 4, 't': 4, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_split_change_event(4)) await asyncio.sleep(2) assert await factory.client().get_treatment('maldo', 'split1') == 'off' assert not task.running() - split_changes[4] = { - 'since': 4, - 'till': 5, - 'splits': [make_simple_split('split1', 5, True, False, 'off', 'user', True)] + split_changes[4] = {'ff': { + 's': 4, + 't': 5, + 'd': [make_simple_split('split1', 5, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[5] = {'since': 5, 'till': 5, 'splits': []} + split_changes[5] = {'ff': {'s': 5, 't': 5, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_control_event('STREAMING_DISABLED', 2)) await asyncio.sleep(2) @@ -1980,73 +2035,73 @@ async def 
test_streaming_status_changes(self): # Initial splits fetch req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=-1' + assert req.path == '/api/splitChanges?s=1.3&since=-1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Auth req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/v2/auth?s=1.1' + assert req.path == '/api/v2/auth?s=1.3' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after streaming connected req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll on push down req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after push is up req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == 
'/api/splitChanges?s=1.1&since=3' + assert req.path == '/api/splitChanges?s=1.3&since=3&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Fetch after notification req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=3' + assert req.path == '/api/splitChanges?s=1.3&since=3&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=4' + assert req.path == '/api/splitChanges?s=1.3&since=4&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after streaming disabled req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=4' + assert req.path == '/api/splitChanges?s=1.3&since=4&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=5' + assert req.path == '/api/splitChanges?s=1.3&since=5&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Cleanup @@ -2072,16 +2127,13 @@ async def test_server_closes_connection(self): } split_changes = { - -1: { - 'since': -1, - 'till': 1, - 'splits': [make_simple_split('split1', 1, True, False, 'on', 'user', True)] + -1: {'ff': { + 's': -1, + 't': 1, + 'd': [make_simple_split('split1', 1, True, False, 'on', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} }, - 1: { - 'since': 1, - 'till': 1, - 'splits': [] - } + 1: {'ff': {'s': 1, 't': 1, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} } segment_changes = {} @@ -2114,12 +2166,13 @@ async def test_server_closes_connection(self): assert not task.running() await asyncio.sleep(1) - split_changes[1] = { - 'since': 1, - 'till': 2, - 'splits': [make_simple_split('split1', 2, True, 
False, 'off', 'user', False)] + split_changes[1] = {'ff': { + 's': 1, + 't': 2, + 'd': [make_simple_split('split1', 2, True, False, 'off', 'user', False)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[2] = {'since': 2, 'till': 2, 'splits': []} + split_changes[2] = {'ff': {'s': 2, 't': 2, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_split_change_event(2)) await asyncio.sleep(1) assert await factory.client().get_treatment('maldo', 'split1') == 'off' @@ -2139,12 +2192,13 @@ async def test_server_closes_connection(self): await asyncio.sleep(2) assert not task.running() - split_changes[2] = { - 'since': 2, - 'till': 3, - 'splits': [make_simple_split('split1', 3, True, False, 'off', 'user', True)] + split_changes[2] = {'ff': { + 's': 2, + 't': 3, + 'd': [make_simple_split('split1', 3, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[3] = {'since': 3, 'till': 3, 'splits': []} + split_changes[3] = {'ff': {'s': 3, 't': 3, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_split_change_event(3)) await asyncio.sleep(1) @@ -2201,67 +2255,67 @@ async def test_server_closes_connection(self): # Initial splits fetch req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=-1' + assert req.path == '/api/splitChanges?s=1.3&since=-1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Auth req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/v2/auth?s=1.1' + assert req.path == '/api/v2/auth?s=1.3' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after streaming connected req = 
split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Fetch after first notification req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll on retryable error handling req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Auth after connection breaks req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/v2/auth?s=1.1' + assert req.path == '/api/v2/auth?s=1.3' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after streaming connected again req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Fetch after new notification req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == 
'/api/splitChanges?s=1.1&since=3' + assert req.path == '/api/splitChanges?s=1.3&since=3&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Cleanup @@ -2294,12 +2348,13 @@ async def test_ably_errors_handling(self): } split_changes = { - -1: { - 'since': -1, - 'till': 1, - 'splits': [make_simple_split('split1', 1, True, False, 'off', 'user', True)] + -1: {'ff': { + 's': -1, + 't': 1, + 'd': [make_simple_split('split1', 1, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} }, - 1: {'since': 1, 'till': 1, 'splits': []} + 1: {'ff': {'s': 1, 't': 1, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} } segment_changes = {} @@ -2338,12 +2393,13 @@ async def test_ably_errors_handling(self): # Make a change in the BE but don't send the event. # We'll send an ignorable error and check it has nothing happened - split_changes[1] = { - 'since': 1, - 'till': 2, - 'splits': [make_simple_split('split1', 2, True, False, 'off', 'user', False)] + split_changes[1] = {'ff': { + 's': 1, + 't': 2, + 'd': [make_simple_split('split1', 2, True, False, 'off', 'user', False)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[2] = {'since': 2, 'till': 2, 'splits': []} + split_changes[2] = {'ff': {'s': 2, 't': 2, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_ably_error_event(60000, 600)) await asyncio.sleep(1) @@ -2366,12 +2422,13 @@ async def test_ably_errors_handling(self): assert not task.running() # Assert streaming is working properly - split_changes[2] = { - 'since': 2, - 'till': 3, - 'splits': [make_simple_split('split1', 3, True, False, 'off', 'user', True)] + split_changes[2] = {'ff': { + 's': 2, + 't': 3, + 'd': [make_simple_split('split1', 3, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} } - split_changes[3] = {'since': 3, 'till': 3, 'splits': []} + split_changes[3] = {'ff': {'s': 3, 't': 3, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} sse_server.publish(make_split_change_event(3)) 
await asyncio.sleep(2) assert await factory.client().get_treatment('maldo', 'split1') == 'on' @@ -2434,67 +2491,67 @@ async def test_ably_errors_handling(self): # Initial splits fetch req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=-1' + assert req.path == '/api/splitChanges?s=1.3&since=-1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Auth req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/v2/auth?s=1.1' + assert req.path == '/api/v2/auth?s=1.3' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after streaming connected req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll retriable error req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=1' + assert req.path == '/api/splitChanges?s=1.3&since=1&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Auth again req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/v2/auth?s=1.1' + assert req.path == '/api/v2/auth?s=1.3' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after push is up req = 
split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Fetch after notification req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=2' + assert req.path == '/api/splitChanges?s=1.3&since=2&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Iteration until since == till req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=3' + assert req.path == '/api/splitChanges?s=1.3&since=3&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # SyncAll after non recoverable ably error req = split_backend_requests.get() assert req.method == 'GET' - assert req.path == '/api/splitChanges?s=1.1&since=3' + assert req.path == '/api/splitChanges?s=1.3&since=3&rbSince=-1' assert req.headers['authorization'] == 'Bearer some_apikey' # Cleanup @@ -2520,12 +2577,13 @@ async def test_change_number(mocker): } split_changes = { - -1: { - 'since': -1, - 'till': 1, - 'splits': [make_simple_split('split1', 1, True, False, 'off', 'user', True)] + -1: {'ff': { + 's': -1, + 't': 1, + 'd': [make_simple_split('split1', 1, True, False, 'off', 'user', True)]}, + 'rbs': {'t': -1, 's': -1, 'd': []} }, - 1: {'since': 1, 'till': 1, 'splits': []} + 1: {'ff': {'s': 1, 't': 1, 'd': []}, 'rbs': {'t': -1, 's': -1, 'd': []}} } segment_changes = {} diff --git a/tests/models/grammar/test_matchers.py b/tests/models/grammar/test_matchers.py index bf582917..12de99e8 100644 --- a/tests/models/grammar/test_matchers.py +++ b/tests/models/grammar/test_matchers.py @@ -404,9 +404,9 @@ def test_matcher_behaviour(self, mocker): matcher = matchers.UserDefinedSegmentMatcher(self.raw) # Test that if the key if the storage wrapper finds the key in the segment, it matches. 
- assert matcher.evaluate('some_key', {}, {'evaluator': None, 'ec': EvaluationContext([],{'some_segment': True})}) is True + assert matcher.evaluate('some_key', {}, {'evaluator': None, 'ec': EvaluationContext([],{'some_segment': True}, {}, {})}) is True # Test that if the key if the storage wrapper doesn't find the key in the segment, it fails. - assert matcher.evaluate('some_key', {}, {'evaluator': None, 'ec': EvaluationContext([], {'some_segment': False})}) is False + assert matcher.evaluate('some_key', {}, {'evaluator': None, 'ec': EvaluationContext([], {'some_segment': False}, {}, {})}) is False def test_to_json(self): """Test that the object serializes to JSON properly.""" @@ -778,8 +778,8 @@ def test_matcher_behaviour(self, mocker): parsed = matchers.DependencyMatcher(cond_raw) evaluator = mocker.Mock(spec=Evaluator) - cond = condition.from_raw(splits_json["splitChange1_1"]["splits"][0]['conditions'][0]) - split = splits.from_raw(splits_json["splitChange1_1"]["splits"][0]) + cond = condition.from_raw(splits_json["splitChange1_1"]['ff']['d'][0]['conditions'][0]) + split = splits.from_raw(splits_json["splitChange1_1"]['ff']['d'][0]) evaluator.eval_with_context.return_value = {'treatment': 'on'} assert parsed.evaluate('SPLIT_2', {}, {'evaluator': evaluator, 'ec': [{'flags': [split], 'segment_memberships': {}}]}) is True diff --git a/tests/push/test_parser.py b/tests/push/test_parser.py index 6f4b57ff..faffb3d0 100644 --- a/tests/push/test_parser.py +++ b/tests/push/test_parser.py @@ -66,7 +66,7 @@ def test_event_parsing(self): assert parsed1.change_number == 1591996685190 assert parsed1.previous_change_number == 12 assert parsed1.compression == 2 - assert parsed1.feature_flag_definition == 'eJzEUtFu2kAQ/BU0z4d0hw2Be0MFRVGJIx' + assert parsed1.object_definition == 'eJzEUtFu2kAQ/BU0z4d0hw2Be0MFRVGJIx' e1 = make_message( 'NDA5ODc2MTAyNg==_MzAyODY0NDkyOA==_splits', @@ -77,7 +77,7 @@ def test_event_parsing(self): assert parsed1.change_number == 1591996685190 assert 
parsed1.previous_change_number == None assert parsed1.compression == None - assert parsed1.feature_flag_definition == None + assert parsed1.object_definition == None e2 = make_message( 'NDA5ODc2MTAyNg==_MzAyODY0NDkyOA==_segments', diff --git a/tests/storage/test_pluggable.py b/tests/storage/test_pluggable.py index 953a4510..a290d721 100644 --- a/tests/storage/test_pluggable.py +++ b/tests/storage/test_pluggable.py @@ -275,19 +275,19 @@ def test_get(self): for sprefix in [None, 'myprefix']: pluggable_split_storage = PluggableSplitStorage(self.mock_adapter, prefix=sprefix) - split1 = splits.from_raw(splits_json['splitChange1_2']['splits'][0]) - split_name = splits_json['splitChange1_2']['splits'][0]['name'] + split1 = splits.from_raw(splits_json['splitChange1_2']['ff']['d'][0]) + split_name = splits_json['splitChange1_2']['ff']['d'][0]['name'] self.mock_adapter.set(pluggable_split_storage._prefix.format(feature_flag_name=split_name), split1.to_json()) - assert(pluggable_split_storage.get(split_name).to_json() == splits.from_raw(splits_json['splitChange1_2']['splits'][0]).to_json()) + assert(pluggable_split_storage.get(split_name).to_json() == splits.from_raw(splits_json['splitChange1_2']['ff']['d'][0]).to_json()) assert(pluggable_split_storage.get('not_existing') == None) def test_fetch_many(self): self.mock_adapter._keys = {} for sprefix in [None, 'myprefix']: pluggable_split_storage = PluggableSplitStorage(self.mock_adapter, prefix=sprefix) - split1 = splits.from_raw(splits_json['splitChange1_2']['splits'][0]) - split2_temp = splits_json['splitChange1_2']['splits'][0].copy() + split1 = splits.from_raw(splits_json['splitChange1_2']['ff']['d'][0]) + split2_temp = splits_json['splitChange1_2']['ff']['d'][0].copy() split2_temp['name'] = 'another_split' split2 = splits.from_raw(split2_temp) @@ -326,8 +326,8 @@ def test_get_split_names(self): self.mock_adapter._keys = {} for sprefix in [None, 'myprefix']: pluggable_split_storage = PluggableSplitStorage(self.mock_adapter, 
prefix=sprefix) - split1 = splits.from_raw(splits_json['splitChange1_2']['splits'][0]) - split2_temp = splits_json['splitChange1_2']['splits'][0].copy() + split1 = splits.from_raw(splits_json['splitChange1_2']['ff']['d'][0]) + split2_temp = splits_json['splitChange1_2']['ff']['d'][0].copy() split2_temp['name'] = 'another_split' split2 = splits.from_raw(split2_temp) self.mock_adapter.set(pluggable_split_storage._prefix.format(feature_flag_name=split1.name), split1.to_json()) @@ -411,12 +411,12 @@ async def test_get(self): for sprefix in [None, 'myprefix']: pluggable_split_storage = PluggableSplitStorageAsync(self.mock_adapter, prefix=sprefix) - split1 = splits.from_raw(splits_json['splitChange1_2']['splits'][0]) - split_name = splits_json['splitChange1_2']['splits'][0]['name'] + split1 = splits.from_raw(splits_json['splitChange1_2']['ff']['d'][0]) + split_name = splits_json['splitChange1_2']['ff']['d'][0]['name'] await self.mock_adapter.set(pluggable_split_storage._prefix.format(feature_flag_name=split_name), split1.to_json()) split = await pluggable_split_storage.get(split_name) - assert(split.to_json() == splits.from_raw(splits_json['splitChange1_2']['splits'][0]).to_json()) + assert(split.to_json() == splits.from_raw(splits_json['splitChange1_2']['ff']['d'][0]).to_json()) assert(await pluggable_split_storage.get('not_existing') == None) @pytest.mark.asyncio @@ -424,8 +424,8 @@ async def test_fetch_many(self): self.mock_adapter._keys = {} for sprefix in [None, 'myprefix']: pluggable_split_storage = PluggableSplitStorageAsync(self.mock_adapter, prefix=sprefix) - split1 = splits.from_raw(splits_json['splitChange1_2']['splits'][0]) - split2_temp = splits_json['splitChange1_2']['splits'][0].copy() + split1 = splits.from_raw(splits_json['splitChange1_2']['ff']['d'][0]) + split2_temp = splits_json['splitChange1_2']['ff']['d'][0].copy() split2_temp['name'] = 'another_split' split2 = splits.from_raw(split2_temp) @@ -452,8 +452,8 @@ async def test_get_split_names(self): 
self.mock_adapter._keys = {} for sprefix in [None, 'myprefix']: pluggable_split_storage = PluggableSplitStorageAsync(self.mock_adapter, prefix=sprefix) - split1 = splits.from_raw(splits_json['splitChange1_2']['splits'][0]) - split2_temp = splits_json['splitChange1_2']['splits'][0].copy() + split1 = splits.from_raw(splits_json['splitChange1_2']['ff']['d'][0]) + split2_temp = splits_json['splitChange1_2']['ff']['d'][0].copy() split2_temp['name'] = 'another_split' split2 = splits.from_raw(split2_temp) await self.mock_adapter.set(pluggable_split_storage._prefix.format(feature_flag_name=split1.name), split1.to_json()) @@ -1386,11 +1386,11 @@ def test_get(self): for sprefix in [None, 'myprefix']: pluggable_rbs_storage = PluggableRuleBasedSegmentsStorage(self.mock_adapter, prefix=sprefix) - rbs1 = rule_based_segments.from_raw(rbsegments_json['segment1']) - rbs_name = rbsegments_json['segment1']['name'] + rbs1 = rule_based_segments.from_raw(rbsegments_json[0]['segment1']) + rbs_name = rbsegments_json[0]['segment1']['name'] self.mock_adapter.set(pluggable_rbs_storage._prefix.format(segment_name=rbs_name), rbs1.to_json()) - assert(pluggable_rbs_storage.get(rbs_name).to_json() == rule_based_segments.from_raw(rbsegments_json['segment1']).to_json()) + assert(pluggable_rbs_storage.get(rbs_name).to_json() == rule_based_segments.from_raw(rbsegments_json[0]['segment1']).to_json()) assert(pluggable_rbs_storage.get('not_existing') == None) def test_get_change_number(self): @@ -1408,8 +1408,8 @@ def test_get_segment_names(self): self.mock_adapter._keys = {} for sprefix in [None, 'myprefix']: pluggable_rbs_storage = PluggableRuleBasedSegmentsStorage(self.mock_adapter, prefix=sprefix) - rbs1 = rule_based_segments.from_raw(rbsegments_json['segment1']) - rbs2_temp = copy.deepcopy(rbsegments_json['segment1']) + rbs1 = rule_based_segments.from_raw(rbsegments_json[0]['segment1']) + rbs2_temp = copy.deepcopy(rbsegments_json[0]['segment1']) rbs2_temp['name'] = 'another_segment' rbs2 = 
rule_based_segments.from_raw(rbs2_temp) self.mock_adapter.set(pluggable_rbs_storage._prefix.format(segment_name=rbs1.name), rbs1.to_json()) @@ -1420,8 +1420,8 @@ def test_contains(self): self.mock_adapter._keys = {} for sprefix in [None, 'myprefix']: pluggable_rbs_storage = PluggableRuleBasedSegmentsStorage(self.mock_adapter, prefix=sprefix) - rbs1 = rule_based_segments.from_raw(rbsegments_json['segment1']) - rbs2_temp = copy.deepcopy(rbsegments_json['segment1']) + rbs1 = rule_based_segments.from_raw(rbsegments_json[0]['segment1']) + rbs2_temp = copy.deepcopy(rbsegments_json[0]['segment1']) rbs2_temp['name'] = 'another_segment' rbs2 = rule_based_segments.from_raw(rbs2_temp) self.mock_adapter.set(pluggable_rbs_storage._prefix.format(segment_name=rbs1.name), rbs1.to_json()) @@ -1445,12 +1445,12 @@ async def test_get(self): for sprefix in [None, 'myprefix']: pluggable_rbs_storage = PluggableRuleBasedSegmentsStorageAsync(self.mock_adapter, prefix=sprefix) - rbs1 = rule_based_segments.from_raw(rbsegments_json['segment1']) - rbs_name = rbsegments_json['segment1']['name'] + rbs1 = rule_based_segments.from_raw(rbsegments_json[0]['segment1']) + rbs_name = rbsegments_json[0]['segment1']['name'] await self.mock_adapter.set(pluggable_rbs_storage._prefix.format(segment_name=rbs_name), rbs1.to_json()) rbs = await pluggable_rbs_storage.get(rbs_name) - assert(rbs.to_json() == rule_based_segments.from_raw(rbsegments_json['segment1']).to_json()) + assert(rbs.to_json() == rule_based_segments.from_raw(rbsegments_json[0]['segment1']).to_json()) assert(await pluggable_rbs_storage.get('not_existing') == None) @pytest.mark.asyncio @@ -1470,8 +1470,8 @@ async def test_get_segment_names(self): self.mock_adapter._keys = {} for sprefix in [None, 'myprefix']: pluggable_rbs_storage = PluggableRuleBasedSegmentsStorageAsync(self.mock_adapter, prefix=sprefix) - rbs1 = rule_based_segments.from_raw(rbsegments_json['segment1']) - rbs2_temp = copy.deepcopy(rbsegments_json['segment1']) + rbs1 = 
rule_based_segments.from_raw(rbsegments_json[0]['segment1']) + rbs2_temp = copy.deepcopy(rbsegments_json[0]['segment1']) rbs2_temp['name'] = 'another_segment' rbs2 = rule_based_segments.from_raw(rbs2_temp) await self.mock_adapter.set(pluggable_rbs_storage._prefix.format(segment_name=rbs1.name), rbs1.to_json()) @@ -1483,8 +1483,8 @@ async def test_contains(self): self.mock_adapter._keys = {} for sprefix in [None, 'myprefix']: pluggable_rbs_storage = PluggableRuleBasedSegmentsStorageAsync(self.mock_adapter, prefix=sprefix) - rbs1 = rule_based_segments.from_raw(rbsegments_json['segment1']) - rbs2_temp = copy.deepcopy(rbsegments_json['segment1']) + rbs1 = rule_based_segments.from_raw(rbsegments_json[0]['segment1']) + rbs2_temp = copy.deepcopy(rbsegments_json[0]['segment1']) rbs2_temp['name'] = 'another_segment' rbs2 = rule_based_segments.from_raw(rbs2_temp) await self.mock_adapter.set(pluggable_rbs_storage._prefix.format(segment_name=rbs1.name), rbs1.to_json()) diff --git a/tests/sync/test_manager.py b/tests/sync/test_manager.py index b99c63a8..47ac3f01 100644 --- a/tests/sync/test_manager.py +++ b/tests/sync/test_manager.py @@ -24,7 +24,7 @@ from splitio.sync.event import EventSynchronizer from splitio.sync.synchronizer import Synchronizer, SynchronizerAsync, SplitTasks, SplitSynchronizers, RedisSynchronizer, RedisSynchronizerAsync from splitio.sync.manager import Manager, ManagerAsync, RedisManager, RedisManagerAsync -from splitio.storage import SplitStorage +from splitio.storage import SplitStorage, RuleBasedSegmentsStorage from splitio.api import APIException from splitio.client.util import SdkMetadata @@ -38,6 +38,7 @@ def test_error(self, mocker): mocker.Mock(), mocker.Mock()) storage = mocker.Mock(spec=SplitStorage) + rb_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) api = mocker.Mock() def run(x): @@ -46,7 +47,7 @@ def run(x): api.fetch_splits.side_effect = run storage.get_change_number.return_value = -1 - split_sync = SplitSynchronizer(api, storage) + 
split_sync = SplitSynchronizer(api, storage, rb_storage) synchronizers = SplitSynchronizers(split_sync, mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock()) @@ -102,6 +103,7 @@ async def test_error(self, mocker): mocker.Mock(), mocker.Mock()) storage = mocker.Mock(spec=SplitStorage) + rb_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) api = mocker.Mock() async def run(x): @@ -112,7 +114,7 @@ async def get_change_number(): return -1 storage.get_change_number = get_change_number - split_sync = SplitSynchronizerAsync(api, storage) + split_sync = SplitSynchronizerAsync(api, storage, rb_storage) synchronizers = SplitSynchronizers(split_sync, mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock()) diff --git a/tests/sync/test_segments_synchronizer.py b/tests/sync/test_segments_synchronizer.py index 6e8f7f78..5a6ef849 100644 --- a/tests/sync/test_segments_synchronizer.py +++ b/tests/sync/test_segments_synchronizer.py @@ -84,12 +84,12 @@ def fetch_segment_mock(segment_name, change_number, fetch_options): assert segments_synchronizer.synchronize_segments() api_calls = [call for call in api.fetch_segment.mock_calls] - assert mocker.call('segmentA', -1, FetchOptions(True, None, None, None)) in api_calls - assert mocker.call('segmentB', -1, FetchOptions(True, None, None, None)) in api_calls - assert mocker.call('segmentC', -1, FetchOptions(True, None, None, None)) in api_calls - assert mocker.call('segmentA', 123, FetchOptions(True, None, None, None)) in api_calls - assert mocker.call('segmentB', 123, FetchOptions(True, None, None, None)) in api_calls - assert mocker.call('segmentC', 123, FetchOptions(True, None, None, None)) in api_calls + assert mocker.call('segmentA', -1, FetchOptions(True, None, None, None, None)) in api_calls + assert mocker.call('segmentB', -1, FetchOptions(True, None, None, None, None)) in api_calls + assert mocker.call('segmentC', -1, FetchOptions(True, None, None, None, None)) in api_calls + assert 
mocker.call('segmentA', 123, FetchOptions(True, None, None, None, None)) in api_calls + assert mocker.call('segmentB', 123, FetchOptions(True, None, None, None, None)) in api_calls + assert mocker.call('segmentC', 123, FetchOptions(True, None, None, None, None)) in api_calls segment_put_calls = storage.put.mock_calls segments_to_validate = set(['segmentA', 'segmentB', 'segmentC']) @@ -128,8 +128,8 @@ def fetch_segment_mock(segment_name, change_number, fetch_options): segments_synchronizer.synchronize_segment('segmentA') api_calls = [call for call in api.fetch_segment.mock_calls] - assert mocker.call('segmentA', -1, FetchOptions(True, None, None, None)) in api_calls - assert mocker.call('segmentA', 123, FetchOptions(True, None, None, None)) in api_calls + assert mocker.call('segmentA', -1, FetchOptions(True, None, None, None, None)) in api_calls + assert mocker.call('segmentA', 123, FetchOptions(True, None, None, None, None)) in api_calls def test_synchronize_segment_cdn(self, mocker): """Test particular segment update cdn bypass.""" @@ -173,12 +173,12 @@ def fetch_segment_mock(segment_name, change_number, fetch_options): segments_synchronizer = SegmentSynchronizer(api, split_storage, storage) segments_synchronizer.synchronize_segment('segmentA') - assert mocker.call('segmentA', -1, FetchOptions(True, None, None, None)) in api.fetch_segment.mock_calls - assert mocker.call('segmentA', 123, FetchOptions(True, None, None, None)) in api.fetch_segment.mock_calls + assert mocker.call('segmentA', -1, FetchOptions(True, None, None, None, None)) in api.fetch_segment.mock_calls + assert mocker.call('segmentA', 123, FetchOptions(True, None, None, None, None)) in api.fetch_segment.mock_calls segments_synchronizer._backoff = Backoff(1, 0.1) segments_synchronizer.synchronize_segment('segmentA', 12345) - assert mocker.call('segmentA', 12345, FetchOptions(True, 1234, None, None)) in api.fetch_segment.mock_calls + assert mocker.call('segmentA', 12345, FetchOptions(True, 1234, None, 
None, None)) in api.fetch_segment.mock_calls assert len(api.fetch_segment.mock_calls) == 8 # 2 ok + BACKOFF(2 since==till + 2 re-attempts) + CDN(2 since==till) def test_recreate(self, mocker): @@ -287,12 +287,12 @@ async def fetch_segment_mock(segment_name, change_number, fetch_options): segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage) assert await segments_synchronizer.synchronize_segments() - assert (self.segment[0], self.change[0], self.options[0]) == ('segmentA', -1, FetchOptions(True, None, None, None)) - assert (self.segment[1], self.change[1], self.options[1]) == ('segmentA', 123, FetchOptions(True, None, None, None)) - assert (self.segment[2], self.change[2], self.options[2]) == ('segmentB', -1, FetchOptions(True, None, None, None)) - assert (self.segment[3], self.change[3], self.options[3]) == ('segmentB', 123, FetchOptions(True, None, None, None)) - assert (self.segment[4], self.change[4], self.options[4]) == ('segmentC', -1, FetchOptions(True, None, None, None)) - assert (self.segment[5], self.change[5], self.options[5]) == ('segmentC', 123, FetchOptions(True, None, None, None)) + assert (self.segment[0], self.change[0], self.options[0]) == ('segmentA', -1, FetchOptions(True, None, None, None, None)) + assert (self.segment[1], self.change[1], self.options[1]) == ('segmentA', 123, FetchOptions(True, None, None, None, None)) + assert (self.segment[2], self.change[2], self.options[2]) == ('segmentB', -1, FetchOptions(True, None, None, None, None)) + assert (self.segment[3], self.change[3], self.options[3]) == ('segmentB', 123, FetchOptions(True, None, None, None, None)) + assert (self.segment[4], self.change[4], self.options[4]) == ('segmentC', -1, FetchOptions(True, None, None, None, None)) + assert (self.segment[5], self.change[5], self.options[5]) == ('segmentC', 123, FetchOptions(True, None, None, None, None)) segments_to_validate = set(['segmentA', 'segmentB', 'segmentC']) for segment in self.segment_put: @@ -343,8 +343,8 
@@ async def fetch_segment_mock(segment_name, change_number, fetch_options): segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage) await segments_synchronizer.synchronize_segment('segmentA') - assert (self.segment[0], self.change[0], self.options[0]) == ('segmentA', -1, FetchOptions(True, None, None, None)) - assert (self.segment[1], self.change[1], self.options[1]) == ('segmentA', 123, FetchOptions(True, None, None, None)) + assert (self.segment[0], self.change[0], self.options[0]) == ('segmentA', -1, FetchOptions(True, None, None, None, None)) + assert (self.segment[1], self.change[1], self.options[1]) == ('segmentA', 123, FetchOptions(True, None, None, None, None)) await segments_synchronizer.shutdown() @@ -403,12 +403,12 @@ async def fetch_segment_mock(segment_name, change_number, fetch_options): segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage) await segments_synchronizer.synchronize_segment('segmentA') - assert (self.segment[0], self.change[0], self.options[0]) == ('segmentA', -1, FetchOptions(True, None, None, None)) - assert (self.segment[1], self.change[1], self.options[1]) == ('segmentA', 123, FetchOptions(True, None, None, None)) + assert (self.segment[0], self.change[0], self.options[0]) == ('segmentA', -1, FetchOptions(True, None, None, None, None)) + assert (self.segment[1], self.change[1], self.options[1]) == ('segmentA', 123, FetchOptions(True, None, None, None, None)) segments_synchronizer._backoff = Backoff(1, 0.1) await segments_synchronizer.synchronize_segment('segmentA', 12345) - assert (self.segment[7], self.change[7], self.options[7]) == ('segmentA', 12345, FetchOptions(True, 1234, None, None)) + assert (self.segment[7], self.change[7], self.options[7]) == ('segmentA', 12345, FetchOptions(True, 1234, None, None, None)) assert len(self.segment) == 8 # 2 ok + BACKOFF(2 since==till + 2 re-attempts) + CDN(2 since==till) await segments_synchronizer.shutdown() diff --git 
a/tests/sync/test_splits_synchronizer.py b/tests/sync/test_splits_synchronizer.py index ce1ade7e..3afb1f0d 100644 --- a/tests/sync/test_splits_synchronizer.py +++ b/tests/sync/test_splits_synchronizer.py @@ -1072,95 +1072,95 @@ def test_elements_sanitization(self, mocker): split_synchronizer = LocalSplitSynchronizer(mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock()) # No changes when split structure is good - assert (split_synchronizer._sanitize_feature_flag_elements(splits_json["splitChange1_1"]["splits"]) == splits_json["splitChange1_1"]["splits"]) + assert (split_synchronizer._sanitize_feature_flag_elements(splits_json["splitChange1_1"]['ff']['d']) == splits_json["splitChange1_1"]['ff']['d']) # test 'trafficTypeName' value None - split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['trafficTypeName'] = None - assert (split_synchronizer._sanitize_feature_flag_elements(split) == splits_json["splitChange1_1"]["splits"]) + assert (split_synchronizer._sanitize_feature_flag_elements(split) == splits_json["splitChange1_1"]['ff']['d']) # test 'trafficAllocation' value None - split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['trafficAllocation'] = None - assert (split_synchronizer._sanitize_feature_flag_elements(split) == splits_json["splitChange1_1"]["splits"]) + assert (split_synchronizer._sanitize_feature_flag_elements(split) == splits_json["splitChange1_1"]['ff']['d']) # test 'trafficAllocation' valid value should not change - split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['trafficAllocation'] = 50 assert (split_synchronizer._sanitize_feature_flag_elements(split) == split) # test 'trafficAllocation' invalid value should change - split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() 
split[0]['trafficAllocation'] = 110 - assert (split_synchronizer._sanitize_feature_flag_elements(split) == splits_json["splitChange1_1"]["splits"]) + assert (split_synchronizer._sanitize_feature_flag_elements(split) == splits_json["splitChange1_1"]['ff']['d']) # test 'trafficAllocationSeed' is set to millisec epoch when None - split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['trafficAllocationSeed'] = None assert (split_synchronizer._sanitize_feature_flag_elements(split)[0]['trafficAllocationSeed'] > 0) # test 'trafficAllocationSeed' is set to millisec epoch when 0 - split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['trafficAllocationSeed'] = 0 assert (split_synchronizer._sanitize_feature_flag_elements(split)[0]['trafficAllocationSeed'] > 0) # test 'seed' is set to millisec epoch when None - split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['seed'] = None assert (split_synchronizer._sanitize_feature_flag_elements(split)[0]['seed'] > 0) # test 'seed' is set to millisec epoch when its 0 - split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['seed'] = 0 assert (split_synchronizer._sanitize_feature_flag_elements(split)[0]['seed'] > 0) # test 'status' is set to ACTIVE when None - split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['status'] = None - assert (split_synchronizer._sanitize_feature_flag_elements(split) == splits_json["splitChange1_1"]["splits"]) + assert (split_synchronizer._sanitize_feature_flag_elements(split) == splits_json["splitChange1_1"]['ff']['d']) # test 'status' is set to ACTIVE when incorrect - split = splits_json["splitChange1_1"]["splits"].copy() + split = 
splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['status'] = 'ww' - assert (split_synchronizer._sanitize_feature_flag_elements(split) == splits_json["splitChange1_1"]["splits"]) + assert (split_synchronizer._sanitize_feature_flag_elements(split) == splits_json["splitChange1_1"]['ff']['d']) # test ''killed' is set to False when incorrect - split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['killed'] = None - assert (split_synchronizer._sanitize_feature_flag_elements(split) == splits_json["splitChange1_1"]["splits"]) + assert (split_synchronizer._sanitize_feature_flag_elements(split) == splits_json["splitChange1_1"]['ff']['d']) # test 'defaultTreatment' is set to on when None - split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['defaultTreatment'] = None assert (split_synchronizer._sanitize_feature_flag_elements(split)[0]['defaultTreatment'] == 'control') # test 'defaultTreatment' is set to on when its empty - split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['defaultTreatment'] = ' ' assert (split_synchronizer._sanitize_feature_flag_elements(split)[0]['defaultTreatment'] == 'control') # test 'changeNumber' is set to 0 when None - split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['changeNumber'] = None assert (split_synchronizer._sanitize_feature_flag_elements(split)[0]['changeNumber'] == 0) # test 'changeNumber' is set to 0 when invalid - split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['changeNumber'] = -33 assert (split_synchronizer._sanitize_feature_flag_elements(split)[0]['changeNumber'] == 0) # test 'algo' is set to 2 when None - split = splits_json["splitChange1_1"]["splits"].copy() + split = 
splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['algo'] = None assert (split_synchronizer._sanitize_feature_flag_elements(split)[0]['algo'] == 2) # test 'algo' is set to 2 when higher than 2 - split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['algo'] = 3 assert (split_synchronizer._sanitize_feature_flag_elements(split)[0]['algo'] == 2) # test 'algo' is set to 2 when lower than 2 - split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]['algo'] = 1 assert (split_synchronizer._sanitize_feature_flag_elements(split)[0]['algo'] == 2) @@ -1183,29 +1183,29 @@ def test_condition_sanitization(self, mocker): split_synchronizer = LocalSplitSynchronizer(mocker.Mock(), mocker.Mock(), mocker.Mock()) # test missing all conditions with default rule set to 100% off - split = splits_json["splitChange1_1"]["splits"].copy() - target_split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() + target_split = splits_json["splitChange1_1"]['ff']['d'].copy() target_split[0]["conditions"][0]['partitions'][0]['size'] = 0 target_split[0]["conditions"][0]['partitions'][1]['size'] = 100 del split[0]["conditions"] assert (split_synchronizer._sanitize_feature_flag_elements(split) == target_split) # test missing ALL_KEYS condition matcher with default rule set to 100% off - split = splits_json["splitChange1_1"]["splits"].copy() - target_split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() + target_split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]["conditions"][0]["matcherGroup"]["matchers"][0]["matcherType"] = "IN_STR" target_split = split.copy() - target_split[0]["conditions"].append(splits_json["splitChange1_1"]["splits"][0]["conditions"][0]) + 
target_split[0]["conditions"].append(splits_json["splitChange1_1"]['ff']['d'][0]["conditions"][0]) target_split[0]["conditions"][1]['partitions'][0]['size'] = 0 target_split[0]["conditions"][1]['partitions'][1]['size'] = 100 assert (split_synchronizer._sanitize_feature_flag_elements(split) == target_split) # test missing ROLLOUT condition type with default rule set to 100% off - split = splits_json["splitChange1_1"]["splits"].copy() - target_split = splits_json["splitChange1_1"]["splits"].copy() + split = splits_json["splitChange1_1"]['ff']['d'].copy() + target_split = splits_json["splitChange1_1"]['ff']['d'].copy() split[0]["conditions"][0]["conditionType"] = "NOT" target_split = split.copy() - target_split[0]["conditions"].append(splits_json["splitChange1_1"]["splits"][0]["conditions"][0]) + target_split[0]["conditions"].append(splits_json["splitChange1_1"]['ff']['d'][0]["conditions"][0]) target_split[0]["conditions"][1]['partitions'][0]['size'] = 0 target_split[0]["conditions"][1]['partitions'][1]['size'] = 100 assert (split_synchronizer._sanitize_feature_flag_elements(split) == target_split) diff --git a/tests/sync/test_synchronizer.py b/tests/sync/test_synchronizer.py index 1e89af66..42985e4c 100644 --- a/tests/sync/test_synchronizer.py +++ b/tests/sync/test_synchronizer.py @@ -12,11 +12,12 @@ from splitio.sync.segment import SegmentSynchronizer, SegmentSynchronizerAsync, LocalSegmentSynchronizer, LocalSegmentSynchronizerAsync from splitio.sync.impression import ImpressionSynchronizer, ImpressionSynchronizerAsync, ImpressionsCountSynchronizer, ImpressionsCountSynchronizerAsync from splitio.sync.event import EventSynchronizer, EventSynchronizerAsync -from splitio.storage import SegmentStorage, SplitStorage +from splitio.storage import SegmentStorage, SplitStorage, RuleBasedSegmentsStorage from splitio.api import APIException, APIUriException from splitio.models.splits import Split from splitio.models.segments import Segment -from splitio.storage.inmemmory 
import InMemorySegmentStorage, InMemorySplitStorage, InMemorySegmentStorageAsync, InMemorySplitStorageAsync +from splitio.storage.inmemmory import InMemorySegmentStorage, InMemorySplitStorage, InMemorySegmentStorageAsync, InMemorySplitStorageAsync, \ + InMemoryRuleBasedSegmentStorage, InMemoryRuleBasedSegmentStorageAsync splits = [{ 'changeNumber': 123, @@ -61,11 +62,11 @@ def intersect(sets): storage.flag_set_filter.flag_sets = {} storage.flag_set_filter.sorted_flag_sets = [] - def run(x, c): + def run(x, y, c): raise APIException("something broke") api.fetch_splits.side_effect = run - split_sync = SplitSynchronizer(api, storage) + split_sync = SplitSynchronizer(api, storage, mocker.Mock()) split_synchronizers = SplitSynchronizers(split_sync, mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock()) sychronizer = Synchronizer(split_synchronizers, mocker.Mock(spec=SplitTasks)) @@ -87,11 +88,11 @@ def intersect(sets): storage.flag_set_filter.flag_sets = {} storage.flag_set_filter.sorted_flag_sets = [] - def run(x, c): + def run(x, y, c): raise APIException("something broke", 414) api.fetch_splits.side_effect = run - split_sync = SplitSynchronizer(api, storage) + split_sync = SplitSynchronizer(api, storage, mocker.Mock()) split_synchronizers = SplitSynchronizers(split_sync, mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock()) synchronizer = Synchronizer(split_synchronizers, mocker.Mock(spec=SplitTasks)) @@ -108,7 +109,7 @@ def test_sync_all_failed_segments(self, mocker): split_sync = mocker.Mock(spec=SplitSynchronizer) split_sync.synchronize_splits.return_value = None - def run(x, y): + def run(x, y, c): raise APIException("something broke") api.fetch_segment.side_effect = run @@ -122,10 +123,11 @@ def run(x, y): def test_synchronize_splits(self, mocker): split_storage = InMemorySplitStorage() + rbs_storage = InMemoryRuleBasedSegmentStorage() split_api = mocker.Mock() - split_api.fetch_splits.return_value = {'splits': splits, 'since': 123, - 'till': 123} - 
split_sync = SplitSynchronizer(split_api, split_storage) + split_api.fetch_splits.return_value = {'ff': {'d': splits, 's': 123, + 't': 123}, 'rbs': {'d': [], 's': -1, 't': -1}} + split_sync = SplitSynchronizer(split_api, split_storage, rbs_storage) segment_storage = InMemorySegmentStorage() segment_api = mocker.Mock() segment_api.fetch_segment.return_value = {'name': 'segmentA', 'added': ['key1', 'key2', @@ -148,10 +150,12 @@ def test_synchronize_splits(self, mocker): def test_synchronize_splits_calling_segment_sync_once(self, mocker): split_storage = InMemorySplitStorage() + rbs_storage = InMemoryRuleBasedSegmentStorage() split_api = mocker.Mock() - split_api.fetch_splits.return_value = {'splits': splits, 'since': 123, - 'till': 123} - split_sync = SplitSynchronizer(split_api, split_storage) + split_api.fetch_splits.return_value = {'ff': {'d': splits, 's': 123, + 't': 123}, 'rbs': {'d': [], 's': -1, 't': -1}} + + split_sync = SplitSynchronizer(split_api, split_storage, rbs_storage) counts = {'segments': 0} def sync_segments(*_): @@ -171,6 +175,7 @@ def sync_segments(*_): def test_sync_all(self, mocker): split_storage = mocker.Mock(spec=SplitStorage) + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) split_storage.get_change_number.return_value = 123 split_storage.get_segment_names.return_value = ['segmentA'] class flag_set_filter(): @@ -183,9 +188,9 @@ def intersect(sets): split_storage.flag_set_filter.sorted_flag_sets = [] split_api = mocker.Mock() - split_api.fetch_splits.return_value = {'splits': splits, 'since': 123, - 'till': 123} - split_sync = SplitSynchronizer(split_api, split_storage) + split_api.fetch_splits.return_value = {'ff': {'d': splits, 's': 123, + 't': 123}, 'rbs': {'d': [], 's': -1, 't': -1}} + split_sync = SplitSynchronizer(split_api, split_storage, rbs_storage) segment_storage = mocker.Mock(spec=SegmentStorage) segment_storage.get_change_number.return_value = 123 @@ -389,6 +394,7 @@ class SynchronizerAsyncTests(object): async def 
test_sync_all_failed_splits(self, mocker): api = mocker.Mock() storage = mocker.Mock() + rbs_storage = mocker.Mock() class flag_set_filter(): def should_filter(): return False @@ -398,15 +404,16 @@ def intersect(sets): storage.flag_set_filter.flag_sets = {} storage.flag_set_filter.sorted_flag_sets = [] - async def run(x, c): + async def run(x, y, c): raise APIException("something broke") api.fetch_splits = run async def get_change_number(): return 1234 storage.get_change_number = get_change_number + rbs_storage.get_change_number = get_change_number - split_sync = SplitSynchronizerAsync(api, storage) + split_sync = SplitSynchronizerAsync(api, storage, rbs_storage) split_synchronizers = SplitSynchronizers(split_sync, mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock()) sychronizer = SynchronizerAsync(split_synchronizers, mocker.Mock(spec=SplitTasks)) @@ -420,6 +427,7 @@ async def get_change_number(): async def test_sync_all_failed_splits_with_flagsets(self, mocker): api = mocker.Mock() storage = mocker.Mock() + rbs_storage = mocker.Mock() class flag_set_filter(): def should_filter(): return False @@ -432,12 +440,13 @@ def intersect(sets): async def get_change_number(): pass storage.get_change_number = get_change_number - - async def run(x, c): + rbs_storage.get_change_number = get_change_number + + async def run(x, y, c): raise APIException("something broke", 414) api.fetch_splits = run - split_sync = SplitSynchronizerAsync(api, storage) + split_sync = SplitSynchronizerAsync(api, storage, rbs_storage) split_synchronizers = SplitSynchronizers(split_sync, mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock()) synchronizer = SynchronizerAsync(split_synchronizers, mocker.Mock(spec=SplitTasks)) @@ -457,7 +466,7 @@ async def test_sync_all_failed_segments(self, mocker): split_sync = mocker.Mock(spec=SplitSynchronizer) split_sync.synchronize_splits.return_value = None - async def run(x, y): + async def run(x, y, c): raise APIException("something broke") 
api.fetch_segment = run @@ -477,14 +486,16 @@ async def get_segment_names(): @pytest.mark.asyncio async def test_synchronize_splits(self, mocker): split_storage = InMemorySplitStorageAsync() + rbs_storage = InMemoryRuleBasedSegmentStorageAsync() split_api = mocker.Mock() - async def fetch_splits(change, options): - return {'splits': splits, 'since': 123, - 'till': 123} + async def fetch_splits(change, rb, options): + return {'ff': {'d': splits, 's': 123, + 't': 123}, 'rbs': {'d': [], 's': -1, 't': -1}} + split_api.fetch_splits = fetch_splits - split_sync = SplitSynchronizerAsync(split_api, split_storage) + split_sync = SplitSynchronizerAsync(split_api, split_storage, rbs_storage) segment_storage = InMemorySegmentStorageAsync() segment_api = mocker.Mock() @@ -518,17 +529,18 @@ async def fetch_segment(segment_name, change, options): @pytest.mark.asyncio async def test_synchronize_splits_calling_segment_sync_once(self, mocker): split_storage = InMemorySplitStorageAsync() + rbs_storage = InMemoryRuleBasedSegmentStorageAsync() async def get_change_number(): return 123 split_storage.get_change_number = get_change_number split_api = mocker.Mock() - async def fetch_splits(change, options): - return {'splits': splits, 'since': 123, - 'till': 123} + async def fetch_splits(change, rb, options): + return {'ff': {'d': splits, 's': 123, + 't': 123}, 'rbs': {'d': [], 's': -1, 't': -1}} split_api.fetch_splits = fetch_splits - split_sync = SplitSynchronizerAsync(split_api, split_storage) + split_sync = SplitSynchronizerAsync(split_api, split_storage, rbs_storage) counts = {'segments': 0} segment_sync = mocker.Mock() @@ -552,6 +564,7 @@ async def segment_exist_in_storage(segment): @pytest.mark.asyncio async def test_sync_all(self, mocker): split_storage = InMemorySplitStorageAsync() + rbs_storage = InMemoryRuleBasedSegmentStorageAsync() async def get_change_number(): return 123 split_storage.get_change_number = get_change_number @@ -576,11 +589,12 @@ def intersect(sets): 
split_storage.flag_set_filter.sorted_flag_sets = [] split_api = mocker.Mock() - async def fetch_splits(change, options): - return {'splits': splits, 'since': 123, 'till': 123} + async def fetch_splits(change, rb, options): + return {'ff': {'d': splits, 's': 123, + 't': 123}, 'rbs': {'d': [], 's': -1, 't': -1}} split_api.fetch_splits = fetch_splits - split_sync = SplitSynchronizerAsync(split_api, split_storage) + split_sync = SplitSynchronizerAsync(split_api, split_storage, rbs_storage) segment_storage = InMemorySegmentStorageAsync() async def get_change_number(segment): return 123 diff --git a/tests/sync/test_telemetry.py b/tests/sync/test_telemetry.py index c3aaac52..898216f8 100644 --- a/tests/sync/test_telemetry.py +++ b/tests/sync/test_telemetry.py @@ -169,7 +169,7 @@ def record_stats(*args, **kwargs): "spC": 1, "seC": 1, "skC": 0, - "ufs": {"sp": 3}, + "ufs": {"rbs": 0, "sp": 3}, "t": ['tag1'] }) @@ -294,6 +294,6 @@ async def record_stats(*args, **kwargs): "spC": 1, "seC": 1, "skC": 0, - "ufs": {"sp": 3}, + "ufs": {"rbs": 0, "sp": 3}, "t": ['tag1'] }) diff --git a/tests/tasks/test_segment_sync.py b/tests/tasks/test_segment_sync.py index 930d3f86..d5640709 100644 --- a/tests/tasks/test_segment_sync.py +++ b/tests/tasks/test_segment_sync.py @@ -62,7 +62,7 @@ def fetch_segment_mock(segment_name, change_number, fetch_options): fetch_segment_mock._count_c = 0 api = mocker.Mock() - fetch_options = FetchOptions(True, None, None, None) + fetch_options = FetchOptions(True, None, None, None, None) api.fetch_segment.side_effect = fetch_segment_mock segments_synchronizer = SegmentSynchronizer(api, split_storage, storage) @@ -139,7 +139,7 @@ def fetch_segment_mock(segment_name, change_number, fetch_options): fetch_segment_mock._count_c = 0 api = mocker.Mock() - fetch_options = FetchOptions(True, None, None, None) + fetch_options = FetchOptions(True, None, None, None, None) api.fetch_segment.side_effect = fetch_segment_mock segments_synchronizer = SegmentSynchronizer(api, 
split_storage, storage) @@ -238,7 +238,7 @@ async def fetch_segment_mock(segment_name, change_number, fetch_options): fetch_segment_mock._count_c = 0 api = mocker.Mock() - fetch_options = FetchOptions(True, None, None, None) + fetch_options = FetchOptions(True, None, None, None, None) api.fetch_segment = fetch_segment_mock segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage) @@ -326,7 +326,7 @@ async def fetch_segment_mock(segment_name, change_number, fetch_options): fetch_segment_mock._count_c = 0 api = mocker.Mock() - fetch_options = FetchOptions(True, None, None, None) + fetch_options = FetchOptions(True, None, None, None, None) api.fetch_segment = fetch_segment_mock segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage) diff --git a/tests/tasks/test_split_sync.py b/tests/tasks/test_split_sync.py index 9e9267e5..c1ec3620 100644 --- a/tests/tasks/test_split_sync.py +++ b/tests/tasks/test_split_sync.py @@ -6,7 +6,7 @@ from splitio.api import APIException from splitio.api.commons import FetchOptions from splitio.tasks import split_sync -from splitio.storage import SplitStorage +from splitio.storage import SplitStorage, RuleBasedSegmentsStorage from splitio.models.splits import Split from splitio.sync.split import SplitSynchronizer, SplitSynchronizerAsync from splitio.optional.loaders import asyncio @@ -53,6 +53,7 @@ class SplitSynchronizationTests(object): def test_normal_operation(self, mocker): """Test the normal operation flow.""" storage = mocker.Mock(spec=SplitStorage) + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) def change_number_mock(): change_number_mock._calls += 1 @@ -78,22 +79,19 @@ def get_changes(*args, **kwargs): get_changes.called += 1 if get_changes.called == 1: - return { - 'splits': splits, - 'since': -1, - 'till': 123 + return {'ff': { + 'd': splits, + 's': -1, + 't': 123}, 'rbs': {'d': [], 't': -1, 's': -1} } else: - return { - 'splits': [], - 'since': 123, - 'till': 123 - } + return 
{'ff': {'d': [],'s': 123, 't': 123}, + 'rbs': {'d': [], 't': -1, 's': -1}} get_changes.called = 0 fetch_options = FetchOptions(True) api.fetch_splits.side_effect = get_changes - split_synchronizer = SplitSynchronizer(api, storage) + split_synchronizer = SplitSynchronizer(api, storage, rbs_storage) task = split_sync.SplitSynchronizationTask(split_synchronizer.synchronize_splits, 0.5) task.start() time.sleep(0.7) @@ -103,9 +101,9 @@ def get_changes(*args, **kwargs): stop_event.wait() assert not task.is_running() assert api.fetch_splits.mock_calls[0][1][0] == -1 - assert api.fetch_splits.mock_calls[0][1][1].cache_control_headers == True + assert api.fetch_splits.mock_calls[0][1][2].cache_control_headers == True assert api.fetch_splits.mock_calls[1][1][0] == 123 - assert api.fetch_splits.mock_calls[1][1][1].cache_control_headers == True + assert api.fetch_splits.mock_calls[1][1][2].cache_control_headers == True inserted_split = storage.update.mock_calls[0][1][0][0] assert isinstance(inserted_split, Split) @@ -114,20 +112,23 @@ def get_changes(*args, **kwargs): def test_that_errors_dont_stop_task(self, mocker): """Test that if fetching splits fails at some_point, the task will continue running.""" storage = mocker.Mock(spec=SplitStorage) + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) api = mocker.Mock() def run(x): run._calls += 1 if run._calls == 1: - return {'splits': [], 'since': -1, 'till': -1} + return {'ff': {'d': [],'s': -1, 't': -1}, + 'rbs': {'d': [], 't': -1, 's': -1}} if run._calls == 2: - return {'splits': [], 'since': -1, 'till': -1} + return {'ff': {'d': [],'s': -1, 't': -1}, + 'rbs': {'d': [], 't': -1, 's': -1}} raise APIException("something broke") run._calls = 0 api.fetch_splits.side_effect = run storage.get_change_number.return_value = -1 - split_synchronizer = SplitSynchronizer(api, storage) + split_synchronizer = SplitSynchronizer(api, storage, rbs_storage) task = split_sync.SplitSynchronizationTask(split_synchronizer.synchronize_splits, 
0.5) task.start() time.sleep(0.1) @@ -144,6 +145,7 @@ class SplitSynchronizationAsyncTests(object): async def test_normal_operation(self, mocker): """Test the normal operation flow.""" storage = mocker.Mock(spec=SplitStorage) + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) async def change_number_mock(): change_number_mock._calls += 1 @@ -152,6 +154,9 @@ async def change_number_mock(): return 123 change_number_mock._calls = 0 storage.get_change_number = change_number_mock + async def rb_change_number_mock(): + return -1 + rbs_storage.get_change_number = rb_change_number_mock class flag_set_filter(): def should_filter(): @@ -167,26 +172,20 @@ async def set_change_number(*_): pass change_number_mock._calls = 0 storage.set_change_number = set_change_number - + api = mocker.Mock() self.change_number = [] self.fetch_options = [] - async def get_changes(change_number, fetch_options): + async def get_changes(change_number, rb_change_number, fetch_options): self.change_number.append(change_number) self.fetch_options.append(fetch_options) get_changes.called += 1 if get_changes.called == 1: - return { - 'splits': splits, - 'since': -1, - 'till': 123 - } + return {'ff': {'d': splits,'s': -1, 't': 123}, + 'rbs': {'d': [], 't': -1, 's': -1}} else: - return { - 'splits': [], - 'since': 123, - 'till': 123 - } + return {'ff': {'d': [],'s': 123, 't': 123}, + 'rbs': {'d': [], 't': -1, 's': -1}} api.fetch_splits = get_changes get_changes.called = 0 self.inserted_split = None @@ -194,12 +193,15 @@ async def update(split, deleted, change_number): if len(split) > 0: self.inserted_split = split storage.update = update - + async def rbs_update(split, deleted, change_number): + pass + rbs_storage.update = rbs_update + fetch_options = FetchOptions(True) - split_synchronizer = SplitSynchronizerAsync(api, storage) + split_synchronizer = SplitSynchronizerAsync(api, storage, rbs_storage) task = split_sync.SplitSynchronizationTaskAsync(split_synchronizer.synchronize_splits, 0.5) 
task.start() - await asyncio.sleep(1) + await asyncio.sleep(2) assert task.is_running() await task.stop() assert not task.is_running() @@ -212,14 +214,17 @@ async def update(split, deleted, change_number): async def test_that_errors_dont_stop_task(self, mocker): """Test that if fetching splits fails at some_point, the task will continue running.""" storage = mocker.Mock(spec=SplitStorage) + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) api = mocker.Mock() async def run(x): run._calls += 1 if run._calls == 1: - return {'splits': [], 'since': -1, 'till': -1} + return {'ff': {'d': [],'s': -1, 't': -1}, + 'rbs': {'d': [], 't': -1, 's': -1}} if run._calls == 2: - return {'splits': [], 'since': -1, 'till': -1} + return {'ff': {'d': [],'s': -1, 't': -1}, + 'rbs': {'d': [], 't': -1, 's': -1}} raise APIException("something broke") run._calls = 0 api.fetch_splits = run @@ -228,7 +233,7 @@ async def get_change_number(): return -1 storage.get_change_number = get_change_number - split_synchronizer = SplitSynchronizerAsync(api, storage) + split_synchronizer = SplitSynchronizerAsync(api, storage, rbs_storage) task = split_sync.SplitSynchronizationTaskAsync(split_synchronizer.synchronize_splits, 0.5) task.start() await asyncio.sleep(0.1) From e070b9041ded6e524c83ca62941e97e70ac4cebd Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Wed, 19 Mar 2025 09:42:12 -0700 Subject: [PATCH 19/56] fixed tests --- tests/integration/__init__.py | 14 ++++++-------- tests/integration/test_client_e2e.py | 8 ++++---- 2 files changed, 10 insertions(+), 12 deletions(-) diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py index 124f5b37..bec5cd6f 100644 --- a/tests/integration/__init__.py +++ b/tests/integration/__init__.py @@ -1,15 +1,13 @@ import copy -rbsegments_json = [{ - "segment1": {"changeNumber": 12, "name": "some_segment", "status": "ACTIVE","trafficTypeName": "user","excluded":{"keys":[],"segments":[]},"conditions": []} -}] +rbsegments_json = 
[{"changeNumber": 12, "name": "some_segment", "status": "ACTIVE","trafficTypeName": "user","excluded":{"keys":[],"segments":[]},"conditions": []}] split11 = {"ff": {"t": 1675443569027, "s": -1, "d": [ {"trafficTypeName": "user", "name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779, "seed": -113875324, "status": "ACTIVE","killed": False, "defaultTreatment": "off", "changeNumber": 1675443569027,"algo": 2, "configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}], "sets": ["set_1"], "impressionsDisabled": False}, {"trafficTypeName": "user", "name": "SPLIT_1", "trafficAllocation": 100, "trafficAllocationSeed": -1780071202,"seed": -1442762199, "status": "ACTIVE","killed": False, "defaultTreatment": "off", "changeNumber": 1675443537882,"algo": 2, "configurations": {},"conditions": [{"conditionType": "ROLLOUT", "matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 0 },{ "treatment": "off", "size": 100 }],"label": "default rule"}], "sets": ["set_1", "set_2"]}, {"trafficTypeName": "user", "name": "SPLIT_3","trafficAllocation": 100,"trafficAllocationSeed": 1057590779, "seed": -113875324, "status": "ACTIVE","killed": False, 
"defaultTreatment": "off", "changeNumber": 1675443569027,"algo": 2, "configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}], "sets": ["set_1"], "impressionsDisabled": True} ]}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}} -split12 = {"ff": {"s": 1675443569027,"t": 167544376728, "d": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": True,"defaultTreatment": "off","changeNumber": 1675443767288,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}]}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}} +split12 = {"ff": {"s": 1675443569027,"t": 1675443767284, "d": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": True,"defaultTreatment": "off","changeNumber": 1675443767288,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": 
[{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}]}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}} split13 = {"ff": {"s": 1675443767288,"t": 1675443984594, "d": [ {"trafficTypeName": "user","name": "SPLIT_1","trafficAllocation": 100,"trafficAllocationSeed": -1780071202,"seed": -1442762199,"status": "ARCHIVED","killed": False,"defaultTreatment": "off","changeNumber": 1675443984594,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 0 },{ "treatment": "off", "size": 100 }],"label": "default rule"}]}, {"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": False,"defaultTreatment": "off","changeNumber": 1675443954220,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": 
"on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]} @@ -29,13 +27,13 @@ "splitChange1_2": split12, "splitChange1_3": split13, "splitChange2_1": {"ff": {"t": -1, "s": -1, "d": [{"name": "SPLIT_1","status": "ACTIVE","killed": False,"defaultTreatment": "off","configurations": {},"conditions": []}]}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}}, - "splitChange3_1": {"ff": {"t": -1, "s": -1, "d": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": False,"defaultTreatment": "off","changeNumber": 1675443569027,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"since": -1,"till": 1675443569027}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}}, - "splitChange3_2": {"ff": {"t": -1, "s": -1, "d": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": True,"defaultTreatment": "off","changeNumber": 1675443767288,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": 
[{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"since": 1675443569027,"till": 1675443569027}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}}, + "splitChange3_1": {"ff": {"t": -1, "s": -1, "d": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": False,"defaultTreatment": "off","changeNumber": 1675443569027,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"s": -1,"t": 1675443569027}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}}, + "splitChange3_2": {"ff": {"t": -1, "s": -1, "d": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": True,"defaultTreatment": "off","changeNumber": 1675443767288,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"s": 1675443569027,"t": 1675443569027}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}}, 
"splitChange4_1": split41, "splitChange4_2": split42, "splitChange4_3": split43, - "splitChange5_1": {"ff": {"t": -1, "s": -1, "d": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": False,"defaultTreatment": "off","changeNumber": 1675443569027,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"since": -1,"till": 1675443569027}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}}, - "splitChange5_2": {"ff": {"t": -1, "s": -1, "d": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": True,"defaultTreatment": "off","changeNumber": 1675443767288,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"since": 1675443569026,"till": 1675443569026}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}}, + "splitChange5_1": {"ff": {"t": -1, "s": -1, "d": [{"trafficTypeName": "user","name": 
"SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": False,"defaultTreatment": "off","changeNumber": 1675443569027,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"s": -1,"t": 1675443569027}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}}, + "splitChange5_2": {"ff": {"t": -1, "s": -1, "d": [{"trafficTypeName": "user","name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779,"seed": -113875324,"status": "ACTIVE","killed": True,"defaultTreatment": "off","changeNumber": 1675443767288,"algo": 2,"configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}]}],"s": 1675443569026,"t": 1675443569026}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}}, "splitChange6_1": split61, "splitChange6_2": split62, "splitChange6_3": split63, diff --git a/tests/integration/test_client_e2e.py b/tests/integration/test_client_e2e.py index c8a6a666..b1f5d836 100644 --- a/tests/integration/test_client_e2e.py +++ 
b/tests/integration/test_client_e2e.py @@ -1020,7 +1020,7 @@ def test_localhost_json_e2e(self): assert sorted(self.factory.manager().split_names()) == ["SPLIT_1", "SPLIT_2", "SPLIT_3"] assert client.get_treatment("key", "SPLIT_1", None) == 'off' - assert client.get_treatment("key", "SPLIT_2", None) == 'on' #?? + assert client.get_treatment("key", "SPLIT_2", None) == 'off' self._update_temp_file(splits_json['splitChange1_3']) self._synchronize_now() @@ -1078,7 +1078,7 @@ def test_localhost_json_e2e(self): self._synchronize_now() assert sorted(self.factory.manager().split_names()) == ["SPLIT_2", "SPLIT_3"] - assert client.get_treatment("key", "SPLIT_2", None) == 'off' #?? + assert client.get_treatment("key", "SPLIT_2", None) == 'on' # Tests 6 self.factory._storages['splits'].update([], ['SPLIT_2'], -1) @@ -2744,7 +2744,7 @@ async def test_localhost_json_e2e(self): assert sorted(await self.factory.manager().split_names()) == ["SPLIT_1", "SPLIT_2", "SPLIT_3"] assert await client.get_treatment("key", "SPLIT_1", None) == 'off' - assert await client.get_treatment("key", "SPLIT_2", None) == 'on' #?? + assert await client.get_treatment("key", "SPLIT_2", None) == 'off' self._update_temp_file(splits_json['splitChange1_3']) await self._synchronize_now() @@ -2802,7 +2802,7 @@ async def test_localhost_json_e2e(self): await self._synchronize_now() assert sorted(await self.factory.manager().split_names()) == ["SPLIT_2", "SPLIT_3"] - assert await client.get_treatment("key", "SPLIT_2", None) == 'off' #?? 
+ assert await client.get_treatment("key", "SPLIT_2", None) == 'on' # Tests 6 await self.factory._storages['splits'].update([], ['SPLIT_2'], -1) From 2e7f5d33fa01224f71b153557e3b0ceb31c39c59 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Sun, 23 Mar 2025 22:43:41 -0700 Subject: [PATCH 20/56] updated storage helper and evaluator --- splitio/engine/evaluator.py | 22 +-- splitio/models/rule_based_segments.py | 8 ++ splitio/storage/inmemmory.py | 2 +- splitio/util/storage_helper.py | 2 + tests/engine/test_evaluator.py | 49 ++++++- tests/models/test_rule_based_segments.py | 26 +++- tests/storage/test_pluggable.py | 28 ++-- tests/util/test_storage_helper.py | 170 ++++++++++++++++++++++- 8 files changed, 272 insertions(+), 35 deletions(-) diff --git a/splitio/engine/evaluator.py b/splitio/engine/evaluator.py index 80a75eec..3bd11512 100644 --- a/splitio/engine/evaluator.py +++ b/splitio/engine/evaluator.py @@ -133,16 +133,19 @@ def context_for(self, key, feature_names): key_membership = False segment_memberhsip = False for rbs_segment in pending_rbs_memberships: - key_membership = key in self._rbs_segment_storage.get(rbs_segment).excluded.get_excluded_keys() + rbs_segment_obj = self._rbs_segment_storage.get(rbs_segment) + pending_memberships.update(rbs_segment_obj.get_condition_segment_names()) + + key_membership = key in rbs_segment_obj.excluded.get_excluded_keys() segment_memberhsip = False - for segment_name in self._rbs_segment_storage.get(rbs_segment).excluded.get_excluded_segments(): + for segment_name in rbs_segment_obj.excluded.get_excluded_segments(): if self._segment_storage.segment_contains(segment_name, key): segment_memberhsip = True break rbs_segment_memberships.update({rbs_segment: segment_memberhsip or key_membership}) if not (segment_memberhsip or key_membership): - rbs_segment_conditions.update({rbs_segment: [condition for condition in self._rbs_segment_storage.get(rbs_segment).conditions]}) + rbs_segment_conditions.update({rbs_segment: 
[condition for condition in rbs_segment_obj.conditions]}) return EvaluationContext( splits, @@ -184,18 +187,14 @@ async def context_for(self, key, feature_names): pending_memberships.update(cs) pending_rbs_memberships.update(crbs) - segment_names = list(pending_memberships) - segment_memberships = await asyncio.gather(*[ - self._segment_storage.segment_contains(segment, key) - for segment in segment_names - ]) - rbs_segment_memberships = {} rbs_segment_conditions = {} key_membership = False segment_memberhsip = False for rbs_segment in pending_rbs_memberships: rbs_segment_obj = await self._rbs_segment_storage.get(rbs_segment) + pending_memberships.update(rbs_segment_obj.get_condition_segment_names()) + key_membership = key in rbs_segment_obj.excluded.get_excluded_keys() segment_memberhsip = False for segment_name in rbs_segment_obj.excluded.get_excluded_segments(): @@ -207,6 +206,11 @@ async def context_for(self, key, feature_names): if not (segment_memberhsip or key_membership): rbs_segment_conditions.update({rbs_segment: [condition for condition in rbs_segment_obj.conditions]}) + segment_names = list(pending_memberships) + segment_memberships = await asyncio.gather(*[ + self._segment_storage.segment_contains(segment, key) + for segment in segment_names + ]) return EvaluationContext( splits, dict(zip(segment_names, segment_memberships)), diff --git a/splitio/models/rule_based_segments.py b/splitio/models/rule_based_segments.py index 66ec7ddf..f611a792 100644 --- a/splitio/models/rule_based_segments.py +++ b/splitio/models/rule_based_segments.py @@ -76,6 +76,14 @@ def to_json(self): 'excluded': self.excluded.to_json() } + def get_condition_segment_names(self): + segments = set() + for condition in self._conditions: + for matcher in condition.matchers: + if matcher._matcher_type == 'IN_SEGMENT': + segments.add(matcher.to_json()['userDefinedSegmentMatcherData']['segmentName']) + return segments + def from_raw(raw_rule_based_segment): """ Parse a Rule based segment 
from a JSON portion of splitChanges. diff --git a/splitio/storage/inmemmory.py b/splitio/storage/inmemmory.py index 98fc0543..c3fb09ec 100644 --- a/splitio/storage/inmemmory.py +++ b/splitio/storage/inmemmory.py @@ -200,7 +200,7 @@ def get_segment_names(self): """ with self._lock: return list(self._rule_based_segments.keys()) - + def get_large_segment_names(self): """ Retrieve a list of all excluded large segments names. diff --git a/splitio/util/storage_helper.py b/splitio/util/storage_helper.py index f547a701..699f4871 100644 --- a/splitio/util/storage_helper.py +++ b/splitio/util/storage_helper.py @@ -53,6 +53,7 @@ def update_rule_based_segment_storage(rule_based_segment_storage, rule_based_seg if rule_based_segment.status == "ACTIVE": to_add.append(rule_based_segment) segment_list.update(set(rule_based_segment.excluded.get_excluded_segments())) + segment_list.update(rule_based_segment.get_condition_segment_names()) else: if rule_based_segment_storage.get(rule_based_segment.name) is not None: to_delete.append(rule_based_segment.name) @@ -109,6 +110,7 @@ async def update_rule_based_segment_storage_async(rule_based_segment_storage, ru if rule_based_segment.status == "ACTIVE": to_add.append(rule_based_segment) segment_list.update(set(rule_based_segment.excluded.get_excluded_segments())) + segment_list.update(rule_based_segment.get_condition_segment_names()) else: if await rule_based_segment_storage.get(rule_based_segment.name) is not None: to_delete.append(rule_based_segment.name) diff --git a/tests/engine/test_evaluator.py b/tests/engine/test_evaluator.py index 6268ad1d..de8f9325 100644 --- a/tests/engine/test_evaluator.py +++ b/tests/engine/test_evaluator.py @@ -1,6 +1,7 @@ """Evaluator tests module.""" import logging import pytest +import copy from splitio.models.splits import Split, Status from splitio.models.grammar.condition import Condition, ConditionType @@ -243,7 +244,7 @@ def test_evaluate_treatment_with_rule_based_segment(self, mocker): ctx = 
EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={'sample_rule_based_segment': True}, segment_rbs_conditions={'sample_rule_based_segment': []}) result = e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx) assert result['treatment'] == 'off' - + class EvaluationDataFactoryTests(object): """Test evaluation factory class.""" @@ -254,37 +255,75 @@ def test_get_context(self): segment_storage = InMemorySegmentStorage() rbs_segment_storage = InMemoryRuleBasedSegmentStorage() flag_storage.update([mocked_split], [], -1) - rbs = rule_based_segments.from_raw(rbs_raw) + rbs = copy.deepcopy(rbs_raw) + rbs['conditions'].append( + {"matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "matcherType": "IN_SEGMENT", + "negate": False, + "userDefinedSegmentMatcherData": { + "segmentName": "employees" + }, + "whitelistMatcherData": None + } + ] + }, + }) + rbs = rule_based_segments.from_raw(rbs) rbs_segment_storage.update([rbs], [], -1) eval_factory = EvaluationDataFactory(flag_storage, segment_storage, rbs_segment_storage) ec = eval_factory.context_for('bilal@split.io', ['some']) assert ec.segment_rbs_conditions == {'sample_rule_based_segment': rbs.conditions} assert ec.segment_rbs_memberships == {'sample_rule_based_segment': False} + assert ec.segment_memberships == {"employees": False} + segment_storage.update("employees", {"mauro@split.io"}, {}, 1234) ec = eval_factory.context_for('mauro@split.io', ['some']) assert ec.segment_rbs_conditions == {} assert ec.segment_rbs_memberships == {'sample_rule_based_segment': True} - + assert ec.segment_memberships == {"employees": True} + class EvaluationDataFactoryAsyncTests(object): """Test evaluation factory class.""" @pytest.mark.asyncio async def test_get_context(self): """Test context.""" - mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False) + mocked_split = 
Split('some', 123, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False) flag_storage = InMemorySplitStorageAsync([]) segment_storage = InMemorySegmentStorageAsync() rbs_segment_storage = InMemoryRuleBasedSegmentStorageAsync() await flag_storage.update([mocked_split], [], -1) - rbs = rule_based_segments.from_raw(rbs_raw) + rbs = copy.deepcopy(rbs_raw) + rbs['conditions'].append( + {"matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "matcherType": "IN_SEGMENT", + "negate": False, + "userDefinedSegmentMatcherData": { + "segmentName": "employees" + }, + "whitelistMatcherData": None + } + ] + }, + }) + rbs = rule_based_segments.from_raw(rbs) await rbs_segment_storage.update([rbs], [], -1) eval_factory = AsyncEvaluationDataFactory(flag_storage, segment_storage, rbs_segment_storage) ec = await eval_factory.context_for('bilal@split.io', ['some']) assert ec.segment_rbs_conditions == {'sample_rule_based_segment': rbs.conditions} assert ec.segment_rbs_memberships == {'sample_rule_based_segment': False} + assert ec.segment_memberships == {"employees": False} + await segment_storage.update("employees", {"mauro@split.io"}, {}, 1234) ec = await eval_factory.context_for('mauro@split.io', ['some']) assert ec.segment_rbs_conditions == {} assert ec.segment_rbs_memberships == {'sample_rule_based_segment': True} + assert ec.segment_memberships == {"employees": True} diff --git a/tests/models/test_rule_based_segments.py b/tests/models/test_rule_based_segments.py index 96cbdd30..9a822903 100644 --- a/tests/models/test_rule_based_segments.py +++ b/tests/models/test_rule_based_segments.py @@ -1,6 +1,6 @@ """Split model tests module.""" import copy - +import pytest from splitio.models import rule_based_segments from splitio.models import splits from splitio.models.grammar.condition import Condition @@ -79,4 +79,26 @@ def test_incorrect_matcher(self): rbs['conditions'].append(rbs['conditions'][0]) 
rbs['conditions'][0]['matcherGroup']['matchers'][0]['matcherType'] = 'INVALID_MATCHER' parsed = rule_based_segments.from_raw(rbs) - assert parsed.conditions[0].to_json() == splits._DEFAULT_CONDITIONS_TEMPLATE \ No newline at end of file + assert parsed.conditions[0].to_json() == splits._DEFAULT_CONDITIONS_TEMPLATE + + def test_get_condition_segment_names(self): + rbs = copy.deepcopy(self.raw) + rbs['conditions'].append( + {"matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "matcherType": "IN_SEGMENT", + "negate": False, + "userDefinedSegmentMatcherData": { + "segmentName": "employees" + }, + "whitelistMatcherData": None + } + ] + }, + }) + rbs = rule_based_segments.from_raw(rbs) + + assert rbs.get_condition_segment_names() == {"employees"} + \ No newline at end of file diff --git a/tests/storage/test_pluggable.py b/tests/storage/test_pluggable.py index a290d721..283eb8e3 100644 --- a/tests/storage/test_pluggable.py +++ b/tests/storage/test_pluggable.py @@ -1386,11 +1386,11 @@ def test_get(self): for sprefix in [None, 'myprefix']: pluggable_rbs_storage = PluggableRuleBasedSegmentsStorage(self.mock_adapter, prefix=sprefix) - rbs1 = rule_based_segments.from_raw(rbsegments_json[0]['segment1']) - rbs_name = rbsegments_json[0]['segment1']['name'] + rbs1 = rule_based_segments.from_raw(rbsegments_json[0]) + rbs_name = rbsegments_json[0]['name'] self.mock_adapter.set(pluggable_rbs_storage._prefix.format(segment_name=rbs_name), rbs1.to_json()) - assert(pluggable_rbs_storage.get(rbs_name).to_json() == rule_based_segments.from_raw(rbsegments_json[0]['segment1']).to_json()) + assert(pluggable_rbs_storage.get(rbs_name).to_json() == rule_based_segments.from_raw(rbsegments_json[0]).to_json()) assert(pluggable_rbs_storage.get('not_existing') == None) def test_get_change_number(self): @@ -1408,8 +1408,8 @@ def test_get_segment_names(self): self.mock_adapter._keys = {} for sprefix in [None, 'myprefix']: pluggable_rbs_storage = 
PluggableRuleBasedSegmentsStorage(self.mock_adapter, prefix=sprefix) - rbs1 = rule_based_segments.from_raw(rbsegments_json[0]['segment1']) - rbs2_temp = copy.deepcopy(rbsegments_json[0]['segment1']) + rbs1 = rule_based_segments.from_raw(rbsegments_json[0]) + rbs2_temp = copy.deepcopy(rbsegments_json[0]) rbs2_temp['name'] = 'another_segment' rbs2 = rule_based_segments.from_raw(rbs2_temp) self.mock_adapter.set(pluggable_rbs_storage._prefix.format(segment_name=rbs1.name), rbs1.to_json()) @@ -1420,8 +1420,8 @@ def test_contains(self): self.mock_adapter._keys = {} for sprefix in [None, 'myprefix']: pluggable_rbs_storage = PluggableRuleBasedSegmentsStorage(self.mock_adapter, prefix=sprefix) - rbs1 = rule_based_segments.from_raw(rbsegments_json[0]['segment1']) - rbs2_temp = copy.deepcopy(rbsegments_json[0]['segment1']) + rbs1 = rule_based_segments.from_raw(rbsegments_json[0]) + rbs2_temp = copy.deepcopy(rbsegments_json[0]) rbs2_temp['name'] = 'another_segment' rbs2 = rule_based_segments.from_raw(rbs2_temp) self.mock_adapter.set(pluggable_rbs_storage._prefix.format(segment_name=rbs1.name), rbs1.to_json()) @@ -1445,12 +1445,12 @@ async def test_get(self): for sprefix in [None, 'myprefix']: pluggable_rbs_storage = PluggableRuleBasedSegmentsStorageAsync(self.mock_adapter, prefix=sprefix) - rbs1 = rule_based_segments.from_raw(rbsegments_json[0]['segment1']) - rbs_name = rbsegments_json[0]['segment1']['name'] + rbs1 = rule_based_segments.from_raw(rbsegments_json[0]) + rbs_name = rbsegments_json[0]['name'] await self.mock_adapter.set(pluggable_rbs_storage._prefix.format(segment_name=rbs_name), rbs1.to_json()) rbs = await pluggable_rbs_storage.get(rbs_name) - assert(rbs.to_json() == rule_based_segments.from_raw(rbsegments_json[0]['segment1']).to_json()) + assert(rbs.to_json() == rule_based_segments.from_raw(rbsegments_json[0]).to_json()) assert(await pluggable_rbs_storage.get('not_existing') == None) @pytest.mark.asyncio @@ -1470,8 +1470,8 @@ async def 
test_get_segment_names(self): self.mock_adapter._keys = {} for sprefix in [None, 'myprefix']: pluggable_rbs_storage = PluggableRuleBasedSegmentsStorageAsync(self.mock_adapter, prefix=sprefix) - rbs1 = rule_based_segments.from_raw(rbsegments_json[0]['segment1']) - rbs2_temp = copy.deepcopy(rbsegments_json[0]['segment1']) + rbs1 = rule_based_segments.from_raw(rbsegments_json[0]) + rbs2_temp = copy.deepcopy(rbsegments_json[0]) rbs2_temp['name'] = 'another_segment' rbs2 = rule_based_segments.from_raw(rbs2_temp) await self.mock_adapter.set(pluggable_rbs_storage._prefix.format(segment_name=rbs1.name), rbs1.to_json()) @@ -1483,8 +1483,8 @@ async def test_contains(self): self.mock_adapter._keys = {} for sprefix in [None, 'myprefix']: pluggable_rbs_storage = PluggableRuleBasedSegmentsStorageAsync(self.mock_adapter, prefix=sprefix) - rbs1 = rule_based_segments.from_raw(rbsegments_json[0]['segment1']) - rbs2_temp = copy.deepcopy(rbsegments_json[0]['segment1']) + rbs1 = rule_based_segments.from_raw(rbsegments_json[0]) + rbs2_temp = copy.deepcopy(rbsegments_json[0]) rbs2_temp['name'] = 'another_segment' rbs2 = rule_based_segments.from_raw(rbs2_temp) await self.mock_adapter.set(pluggable_rbs_storage._prefix.format(segment_name=rbs1.name), rbs1.to_json()) diff --git a/tests/util/test_storage_helper.py b/tests/util/test_storage_helper.py index 7608306d..7c9c04fc 100644 --- a/tests/util/test_storage_helper.py +++ b/tests/util/test_storage_helper.py @@ -1,14 +1,43 @@ """Storage Helper tests.""" import pytest -from splitio.util.storage_helper import update_feature_flag_storage, get_valid_flag_sets, combine_valid_flag_sets -from splitio.storage.inmemmory import InMemorySplitStorage -from splitio.models import splits +from splitio.util.storage_helper import update_feature_flag_storage, get_valid_flag_sets, combine_valid_flag_sets, \ + update_rule_based_segment_storage, update_rule_based_segment_storage_async, update_feature_flag_storage_async +from splitio.storage.inmemmory import 
InMemorySplitStorage, InMemoryRuleBasedSegmentStorage, InMemoryRuleBasedSegmentStorageAsync, \ + InMemorySplitStorageAsync +from splitio.models import splits, rule_based_segments from splitio.storage import FlagSetsFilter from tests.sync.test_splits_synchronizer import splits_raw as split_sample class StorageHelperTests(object): + rbs = rule_based_segments.from_raw({ + "changeNumber": 123, + "name": "sample_rule_based_segment", + "status": "ACTIVE", + "trafficTypeName": "user", + "excluded":{ + "keys":["mauro@split.io","gaston@split.io"], + "segments":['excluded_segment'] + }, + "conditions": [ + {"matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "matcherType": "IN_SEGMENT", + "negate": False, + "userDefinedSegmentMatcherData": { + "segmentName": "employees" + }, + "whitelistMatcherData": None + } + ] + }, + } + ] + }) + def test_update_feature_flag_storage(self, mocker): storage = mocker.Mock(spec=InMemorySplitStorage) split = splits.from_raw(split_sample[0]) @@ -126,4 +155,137 @@ def test_combine_valid_flag_sets(self): assert combine_valid_flag_sets(results_set) == {'set2', 'set3'} results_set = ['set1', {'set2', 'set3'}] - assert combine_valid_flag_sets(results_set) == {'set2', 'set3'} \ No newline at end of file + assert combine_valid_flag_sets(results_set) == {'set2', 'set3'} + + def test_update_rule_base_segment_storage(self, mocker): + storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) + self.added = [] + self.deleted = [] + self.change_number = 0 + def update(to_add, to_delete, change_number): + self.added = to_add + self.deleted = to_delete + self.change_number = change_number + storage.update = update + + segments = update_rule_based_segment_storage(storage, [self.rbs], 123) + assert self.added[0] == self.rbs + assert self.deleted == [] + assert self.change_number == 123 + assert segments == {'excluded_segment', 'employees'} + + @pytest.mark.asyncio + async def test_update_rule_base_segment_storage_async(self, mocker): + storage = 
mocker.Mock(spec=InMemoryRuleBasedSegmentStorageAsync) + self.added = [] + self.deleted = [] + self.change_number = 0 + async def update(to_add, to_delete, change_number): + self.added = to_add + self.deleted = to_delete + self.change_number = change_number + storage.update = update + + segments = await update_rule_based_segment_storage_async(storage, [self.rbs], 123) + assert self.added[0] == self.rbs + assert self.deleted == [] + assert self.change_number == 123 + assert segments == {'excluded_segment', 'employees'} + + @pytest.mark.asyncio + async def test_update_feature_flag_storage_async(self, mocker): + storage = mocker.Mock(spec=InMemorySplitStorageAsync) + split = splits.from_raw(split_sample[0]) + + self.added = [] + self.deleted = [] + self.change_number = 0 + async def get(flag_name): + return None + storage.get = get + + async def update(to_add, to_delete, change_number): + self.added = to_add + self.deleted = to_delete + self.change_number = change_number + storage.update = update + + async def is_flag_set_exist(flag_set): + return False + storage.is_flag_set_exist = is_flag_set_exist + + class flag_set_filter(): + def should_filter(): + return False + def intersect(sets): + return True + storage.flag_set_filter = flag_set_filter + storage.flag_set_filter.flag_sets = {} + + await update_feature_flag_storage_async(storage, [split], 123) + assert self.added[0] == split + assert self.deleted == [] + assert self.change_number == 123 + + class flag_set_filter2(): + def should_filter(): + return True + def intersect(sets): + return False + storage.flag_set_filter = flag_set_filter2 + storage.flag_set_filter.flag_sets = set({'set1', 'set2'}) + + async def get(flag_name): + return split + storage.get = get + + await update_feature_flag_storage_async(storage, [split], 123) + assert self.added == [] + assert self.deleted[0] == split.name + + class flag_set_filter3(): + def should_filter(): + return True + def intersect(sets): + return True + 
storage.flag_set_filter = flag_set_filter3 + storage.flag_set_filter.flag_sets = set({'set1', 'set2'}) + + async def is_flag_set_exist2(flag_set): + return True + storage.is_flag_set_exist = is_flag_set_exist2 + await update_feature_flag_storage_async(storage, [split], 123) + assert self.added[0] == split + assert self.deleted == [] + + split_json = split_sample[0] + split_json['conditions'].append({ + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "matcherType": "IN_SEGMENT", + "negate": False, + "userDefinedSegmentMatcherData": { + "segmentName": "segment1" + }, + "whitelistMatcherData": None + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 30 + }, + { + "treatment": "off", + "size": 70 + } + ] + } + ) + + split = splits.from_raw(split_json) + storage.config_flag_sets_used = 0 + assert await update_feature_flag_storage_async(storage, [split], 123) == {'segment1'} \ No newline at end of file From 9aa56a1372b3ccd560e1f477e1e452584bf63df7 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Thu, 1 May 2025 11:18:39 -0700 Subject: [PATCH 21/56] Added support for old spec in fetcher --- splitio/api/client.py | 31 ++++- splitio/api/splits.py | 77 +++++++++-- splitio/models/rule_based_segments.py | 8 +- splitio/storage/inmemmory.py | 36 +++++ splitio/sync/split.py | 10 +- splitio/util/storage_helper.py | 24 +++- splits.json | 1 + tests/api/test_splits_api.py | 129 ++++++++++++++++++ .../integration/files/split_changes_temp.json | 2 +- tests/models/test_rule_based_segments.py | 2 +- tests/util/test_storage_helper.py | 41 +++++- 11 files changed, 326 insertions(+), 35 deletions(-) create mode 100644 splits.json diff --git a/splitio/api/client.py b/splitio/api/client.py index 5db1cadb..d0bda3e7 100644 --- a/splitio/api/client.py +++ b/splitio/api/client.py @@ -92,6 +92,25 @@ def proxy_headers(self, proxy): class HttpClientBase(object, metaclass=abc.ABCMeta): """HttpClient wrapper template.""" + def __init__(self, timeout=None, sdk_url=None, 
events_url=None, auth_url=None, telemetry_url=None): + """ + Class constructor. + + :param timeout: How many milliseconds to wait until the server responds. + :type timeout: int + :param sdk_url: Optional alternative sdk URL. + :type sdk_url: str + :param events_url: Optional alternative events URL. + :type events_url: str + :param auth_url: Optional alternative auth URL. + :type auth_url: str + :param telemetry_url: Optional alternative telemetry URL. + :type telemetry_url: str + """ + _LOGGER.debug("Initializing httpclient") + self._timeout = timeout/1000 if timeout else None # Convert ms to seconds. + self._urls = _construct_urls(sdk_url, events_url, auth_url, telemetry_url) + @abc.abstractmethod def get(self, server, path, apikey): """http get request""" @@ -113,6 +132,9 @@ def set_telemetry_data(self, metric_name, telemetry_runtime_producer): self._telemetry_runtime_producer = telemetry_runtime_producer self._metric_name = metric_name + def is_sdk_endpoint_overridden(self): + return self._urls['sdk'] == SDK_URL + def _get_headers(self, extra_headers, sdk_key): headers = _build_basic_headers(sdk_key) if extra_headers is not None: @@ -154,10 +176,8 @@ def __init__(self, timeout=None, sdk_url=None, events_url=None, auth_url=None, t :param telemetry_url: Optional alternative telemetry URL. :type telemetry_url: str """ - _LOGGER.debug("Initializing httpclient") - self._timeout = timeout/1000 if timeout else None # Convert ms to seconds. - self._urls = _construct_urls(sdk_url, events_url, auth_url, telemetry_url) - + HttpClientBase.__init__(self, timeout, sdk_url, events_url, auth_url, telemetry_url) + def get(self, server, path, sdk_key, query=None, extra_headers=None): # pylint: disable=too-many-arguments """ Issue a get request. @@ -241,8 +261,7 @@ def __init__(self, timeout=None, sdk_url=None, events_url=None, auth_url=None, t :param telemetry_url: Optional alternative telemetry URL. 
:type telemetry_url: str """ - self._timeout = timeout/1000 if timeout else None # Convert ms to seconds. - self._urls = _construct_urls(sdk_url, events_url, auth_url, telemetry_url) + HttpClientBase.__init__(self, timeout, sdk_url, events_url, auth_url, telemetry_url) self._session = aiohttp.ClientSession() async def get(self, server, path, apikey, query=None, extra_headers=None): # pylint: disable=too-many-arguments diff --git a/splitio/api/splits.py b/splitio/api/splits.py index f013497a..4de9204a 100644 --- a/splitio/api/splits.py +++ b/splitio/api/splits.py @@ -4,14 +4,17 @@ import json from splitio.api import APIException, headers_from_metadata -from splitio.api.commons import build_fetch +from splitio.api.commons import build_fetch, FetchOptions from splitio.api.client import HttpClientException from splitio.models.telemetry import HTTPExceptionsAndLatencies +from splitio.util.time import utctime_ms +from splitio.spec import SPEC_VERSION _LOGGER = logging.getLogger(__name__) +_SPEC_1_1 = "1.1" +_PROXY_CHECK_INTERVAL_MILLISECONDS_SS = 24 * 60 * 60 * 1000 - -class SplitsAPI(object): # pylint: disable=too-few-public-methods +class SplitsAPIBase(object): # pylint: disable=too-few-public-methods """Class that uses an httpClient to communicate with the splits API.""" def __init__(self, client, sdk_key, sdk_metadata, telemetry_runtime_producer): @@ -30,6 +33,35 @@ def __init__(self, client, sdk_key, sdk_metadata, telemetry_runtime_producer): self._metadata = headers_from_metadata(sdk_metadata) self._telemetry_runtime_producer = telemetry_runtime_producer self._client.set_telemetry_data(HTTPExceptionsAndLatencies.SPLIT, self._telemetry_runtime_producer) + self._spec_version = SPEC_VERSION + self._last_proxy_check_timestamp = 0 + self.clear_storage = False + + def _convert_to_new_spec(self, body): + return {"ff": {"d": body["splits"], "s": body["since"], "t": body["till"]}, + "rbs": {"d": [], "s": -1, "t": -1}} + + def _check_last_proxy_check_timestamp(self): + if 
self._spec_version == _SPEC_1_1 and ((utctime_ms() - self._last_proxy_check_timestamp) >= _PROXY_CHECK_INTERVAL_MILLISECONDS_SS): + _LOGGER.info("Switching to new Feature flag spec (%s) and fetching.", SPEC_VERSION); + self._spec_version = SPEC_VERSION + + +class SplitsAPI(SplitsAPIBase): # pylint: disable=too-few-public-methods + """Class that uses an httpClient to communicate with the splits API.""" + + def __init__(self, client, sdk_key, sdk_metadata, telemetry_runtime_producer): + """ + Class constructor. + + :param client: HTTP Client responsble for issuing calls to the backend. + :type client: HttpClient + :param sdk_key: User sdk_key token. + :type sdk_key: string + :param sdk_metadata: SDK version & machine name & IP. + :type sdk_metadata: splitio.client.util.SdkMetadata + """ + SplitsAPIBase.__init__(self, client, sdk_key, sdk_metadata, telemetry_runtime_producer) def fetch_splits(self, change_number, rbs_change_number, fetch_options): """ @@ -48,6 +80,7 @@ def fetch_splits(self, change_number, rbs_change_number, fetch_options): :rtype: dict """ try: + self._check_last_proxy_check_timestamp() query, extra_headers = build_fetch(change_number, fetch_options, self._metadata, rbs_change_number) response = self._client.get( 'sdk', @@ -57,19 +90,32 @@ def fetch_splits(self, change_number, rbs_change_number, fetch_options): query=query, ) if 200 <= response.status_code < 300: + if self._spec_version == _SPEC_1_1: + return self._convert_to_new_spec(json.loads(response.body)) + + self.clear_storage = self._last_proxy_check_timestamp != 0 + self._last_proxy_check_timestamp = 0 return json.loads(response.body) else: if response.status_code == 414: _LOGGER.error('Error fetching feature flags; the amount of flag sets provided are too big, causing uri length error.') + + if self._client.is_sdk_endpoint_overridden() and response.status_code == 400 and self._spec_version == SPEC_VERSION: + _LOGGER.warning('Detected proxy response error, changing spec version from %s to %s 
and re-fetching.', self._spec_version, _SPEC_1_1) + self._spec_version = _SPEC_1_1 + self._last_proxy_check_timestamp = utctime_ms() + return self.fetch_splits(change_number, None, FetchOptions(fetch_options.cache_control_headers, fetch_options.change_number, + None, fetch_options.sets, self._spec_version)) + raise APIException(response.body, response.status_code) + except HttpClientException as exc: _LOGGER.error('Error fetching feature flags because an exception was raised by the HTTPClient') _LOGGER.debug('Error: ', exc_info=True) raise APIException('Feature flags not fetched correctly.') from exc - -class SplitsAPIAsync(object): # pylint: disable=too-few-public-methods +class SplitsAPIAsync(SplitsAPIBase): # pylint: disable=too-few-public-methods """Class that uses an httpClient to communicate with the splits API.""" def __init__(self, client, sdk_key, sdk_metadata, telemetry_runtime_producer): @@ -83,11 +129,7 @@ def __init__(self, client, sdk_key, sdk_metadata, telemetry_runtime_producer): :param sdk_metadata: SDK version & machine name & IP. 
:type sdk_metadata: splitio.client.util.SdkMetadata """ - self._client = client - self._sdk_key = sdk_key - self._metadata = headers_from_metadata(sdk_metadata) - self._telemetry_runtime_producer = telemetry_runtime_producer - self._client.set_telemetry_data(HTTPExceptionsAndLatencies.SPLIT, self._telemetry_runtime_producer) + SplitsAPIBase.__init__(self, client, sdk_key, sdk_metadata, telemetry_runtime_producer) async def fetch_splits(self, change_number, rbs_change_number, fetch_options): """ @@ -106,6 +148,7 @@ async def fetch_splits(self, change_number, rbs_change_number, fetch_options): :rtype: dict """ try: + self._check_last_proxy_check_timestamp() query, extra_headers = build_fetch(change_number, fetch_options, self._metadata, rbs_change_number) response = await self._client.get( 'sdk', @@ -115,12 +158,26 @@ async def fetch_splits(self, change_number, rbs_change_number, fetch_options): query=query, ) if 200 <= response.status_code < 300: + if self._spec_version == _SPEC_1_1: + return self._convert_to_new_spec(json.loads(response.body)) + + self.clear_storage = self._last_proxy_check_timestamp != 0 + self._last_proxy_check_timestamp = 0 return json.loads(response.body) else: if response.status_code == 414: _LOGGER.error('Error fetching feature flags; the amount of flag sets provided are too big, causing uri length error.') + + if self._client.is_sdk_endpoint_overridden() and response.status_code == 400 and self._spec_version == SPEC_VERSION: + _LOGGER.warning('Detected proxy response error, changing spec version from %s to %s and re-fetching.', self._spec_version, _SPEC_1_1) + self._spec_version = _SPEC_1_1 + self._last_proxy_check_timestamp = utctime_ms() + return await self.fetch_splits(change_number, None, FetchOptions(fetch_options.cache_control_headers, fetch_options.change_number, + None, fetch_options.sets, self._spec_version)) + raise APIException(response.body, response.status_code) + except HttpClientException as exc: _LOGGER.error('Error fetching 
feature flags because an exception was raised by the HTTPClient') _LOGGER.debug('Error: ', exc_info=True) diff --git a/splitio/models/rule_based_segments.py b/splitio/models/rule_based_segments.py index f611a792..5914983c 100644 --- a/splitio/models/rule_based_segments.py +++ b/splitio/models/rule_based_segments.py @@ -5,6 +5,7 @@ from splitio.models import MatcherNotFoundException from splitio.models.splits import _DEFAULT_CONDITIONS_TEMPLATE from splitio.models.grammar import condition +from splitio.models.splits import Status _LOGGER = logging.getLogger(__name__) @@ -31,9 +32,12 @@ def __init__(self, name, traffic_type_name, change_number, status, conditions, e self._name = name self._traffic_type_name = traffic_type_name self._change_number = change_number - self._status = status self._conditions = conditions self._excluded = excluded + try: + self._status = Status(status) + except ValueError: + self._status = Status.ARCHIVED @property def name(self): @@ -71,7 +75,7 @@ def to_json(self): 'changeNumber': self.change_number, 'trafficTypeName': self.traffic_type_name, 'name': self.name, - 'status': self.status, + 'status': self.status.value, 'conditions': [c.to_json() for c in self.conditions], 'excluded': self.excluded.to_json() } diff --git a/splitio/storage/inmemmory.py b/splitio/storage/inmemmory.py index c3fb09ec..817e7d86 100644 --- a/splitio/storage/inmemmory.py +++ b/splitio/storage/inmemmory.py @@ -116,6 +116,14 @@ def __init__(self): self._rule_based_segments = {} self._change_number = -1 + def clear(self): + """ + Clear storage + """ + with self._lock: + self._rule_based_segments = {} + self._change_number = -1 + def get(self, segment_name): """ Retrieve a rule based segment. 
@@ -231,6 +239,14 @@ def __init__(self): self._rule_based_segments = {} self._change_number = -1 + async def clear(self): + """ + Clear storage + """ + with self._lock: + self._rule_based_segments = {} + self._change_number = -1 + async def get(self, segment_name): """ Retrieve a rule based segment. @@ -466,6 +482,16 @@ def __init__(self, flag_sets=[]): self.flag_set = FlagSets(flag_sets) self.flag_set_filter = FlagSetsFilter(flag_sets) + def clear(self): + """ + Clear storage + """ + with self._lock: + self._feature_flags = {} + self._change_number = -1 + self._traffic_types = Counter() + self.flag_set = FlagSets(self.flag_set_filter.flag_sets) + def get(self, feature_flag_name): """ Retrieve a feature flag. @@ -672,6 +698,16 @@ def __init__(self, flag_sets=[]): self.flag_set = FlagSets(flag_sets) self.flag_set_filter = FlagSetsFilter(flag_sets) + async def clear(self): + """ + Clear storage + """ + with self._lock: + self._feature_flags = {} + self._change_number = -1 + self._traffic_types = Counter() + self.flag_set = FlagSets(self.flag_set_filter.flag_sets) + async def get(self, feature_flag_name): """ Retrieve a feature flag. 
diff --git a/splitio/sync/split.py b/splitio/sync/split.py index fa7562d0..3a16068b 100644 --- a/splitio/sync/split.py +++ b/splitio/sync/split.py @@ -135,10 +135,10 @@ def _fetch_until(self, fetch_options, till=None, rbs_till=None): raise exc fetched_rule_based_segments = [(rule_based_segments.from_raw(rule_based_segment)) for rule_based_segment in feature_flag_changes.get('rbs').get('d', [])] - rbs_segment_list = update_rule_based_segment_storage(self._rule_based_segment_storage, fetched_rule_based_segments, feature_flag_changes.get('rbs')['t']) + rbs_segment_list = update_rule_based_segment_storage(self._rule_based_segment_storage, fetched_rule_based_segments, feature_flag_changes.get('rbs')['t'], self._api.clear_storage) fetched_feature_flags = [(splits.from_raw(feature_flag)) for feature_flag in feature_flag_changes.get('ff').get('d', [])] - segment_list = update_feature_flag_storage(self._feature_flag_storage, fetched_feature_flags, feature_flag_changes.get('ff')['t']) + segment_list = update_feature_flag_storage(self._feature_flag_storage, fetched_feature_flags, feature_flag_changes.get('ff')['t'], self._api.clear_storage) segment_list.update(rbs_segment_list) if feature_flag_changes.get('ff')['t'] == feature_flag_changes.get('ff')['s'] and feature_flag_changes.get('rbs')['t'] == feature_flag_changes.get('rbs')['s']: @@ -294,10 +294,10 @@ async def _fetch_until(self, fetch_options, till=None, rbs_till=None): raise exc fetched_rule_based_segments = [(rule_based_segments.from_raw(rule_based_segment)) for rule_based_segment in feature_flag_changes.get('rbs').get('d', [])] - rbs_segment_list = await update_rule_based_segment_storage_async(self._rule_based_segment_storage, fetched_rule_based_segments, feature_flag_changes.get('rbs')['t']) + rbs_segment_list = await update_rule_based_segment_storage_async(self._rule_based_segment_storage, fetched_rule_based_segments, feature_flag_changes.get('rbs')['t'], self._api.clear_storage) fetched_feature_flags = 
[(splits.from_raw(feature_flag)) for feature_flag in feature_flag_changes.get('ff').get('d', [])] - segment_list = await update_feature_flag_storage_async(self._feature_flag_storage, fetched_feature_flags, feature_flag_changes.get('ff')['t']) + segment_list = await update_feature_flag_storage_async(self._feature_flag_storage, fetched_feature_flags, feature_flag_changes.get('ff')['t'], self._api.clear_storage) segment_list.update(rbs_segment_list) if feature_flag_changes.get('ff')['t'] == feature_flag_changes.get('ff')['s'] and feature_flag_changes.get('rbs')['t'] == feature_flag_changes.get('rbs')['s']: @@ -541,7 +541,7 @@ def _sanitize_rb_segment_elements(self, parsed_rb_segments): _LOGGER.warning("A rule based segment in json file does not have (Name) or property is empty, skipping.") continue for element in [('trafficTypeName', 'user', None, None, None, None), - ('status', 'ACTIVE', None, None, ['ACTIVE', 'ARCHIVED'], None), + ('status', splits.Status.ACTIVE, None, None, [splits.Status.ACTIVE, splits.Status.ARCHIVED], None), ('changeNumber', 0, 0, None, None, None)]: rb_segment = util._sanitize_object_element(rb_segment, 'rule based segment', element[0], element[1], lower_value=element[2], upper_value=element[3], in_list=element[4], not_in_list=element[5]) rb_segment = self._sanitize_condition(rb_segment) diff --git a/splitio/util/storage_helper.py b/splitio/util/storage_helper.py index 699f4871..d1c37f92 100644 --- a/splitio/util/storage_helper.py +++ b/splitio/util/storage_helper.py @@ -4,7 +4,7 @@ _LOGGER = logging.getLogger(__name__) -def update_feature_flag_storage(feature_flag_storage, feature_flags, change_number): +def update_feature_flag_storage(feature_flag_storage, feature_flags, change_number, clear_storage=False): """ Update feature flag storage from given list of feature flags while checking the flag set logic @@ -21,6 +21,9 @@ def update_feature_flag_storage(feature_flag_storage, feature_flags, change_numb segment_list = set() to_add = [] 
to_delete = [] + if clear_storage: + feature_flag_storage.clear() + for feature_flag in feature_flags: if feature_flag_storage.flag_set_filter.intersect(feature_flag.sets) and feature_flag.status == splits.Status.ACTIVE: to_add.append(feature_flag) @@ -32,7 +35,7 @@ def update_feature_flag_storage(feature_flag_storage, feature_flags, change_numb feature_flag_storage.update(to_add, to_delete, change_number) return segment_list -def update_rule_based_segment_storage(rule_based_segment_storage, rule_based_segments, change_number): +def update_rule_based_segment_storage(rule_based_segment_storage, rule_based_segments, change_number, clear_storage=False): """ Update rule based segment storage from given list of rule based segments @@ -46,11 +49,14 @@ def update_rule_based_segment_storage(rule_based_segment_storage, rule_based_seg :return: segments list from excluded segments list :rtype: list(str) """ + if clear_storage: + rule_based_segment_storage.clear() + segment_list = set() to_add = [] to_delete = [] for rule_based_segment in rule_based_segments: - if rule_based_segment.status == "ACTIVE": + if rule_based_segment.status == splits.Status.ACTIVE: to_add.append(rule_based_segment) segment_list.update(set(rule_based_segment.excluded.get_excluded_segments())) segment_list.update(rule_based_segment.get_condition_segment_names()) @@ -61,7 +67,7 @@ def update_rule_based_segment_storage(rule_based_segment_storage, rule_based_seg rule_based_segment_storage.update(to_add, to_delete, change_number) return segment_list -async def update_feature_flag_storage_async(feature_flag_storage, feature_flags, change_number): +async def update_feature_flag_storage_async(feature_flag_storage, feature_flags, change_number, clear_storage=False): """ Update feature flag storage from given list of feature flags while checking the flag set logic @@ -75,6 +81,9 @@ async def update_feature_flag_storage_async(feature_flag_storage, feature_flags, :return: segments list from feature flags list 
:rtype: list(str) """ + if clear_storage: + await feature_flag_storage.clear() + segment_list = set() to_add = [] to_delete = [] @@ -89,7 +98,7 @@ async def update_feature_flag_storage_async(feature_flag_storage, feature_flags, await feature_flag_storage.update(to_add, to_delete, change_number) return segment_list -async def update_rule_based_segment_storage_async(rule_based_segment_storage, rule_based_segments, change_number): +async def update_rule_based_segment_storage_async(rule_based_segment_storage, rule_based_segments, change_number, clear_storage=False): """ Update rule based segment storage from given list of rule based segments @@ -103,11 +112,14 @@ async def update_rule_based_segment_storage_async(rule_based_segment_storage, ru :return: segments list from excluded segments list :rtype: list(str) """ + if clear_storage: + await rule_based_segment_storage.clear() + segment_list = set() to_add = [] to_delete = [] for rule_based_segment in rule_based_segments: - if rule_based_segment.status == "ACTIVE": + if rule_based_segment.status == splits.Status.ACTIVE: to_add.append(rule_based_segment) segment_list.update(set(rule_based_segment.excluded.get_excluded_segments())) segment_list.update(rule_based_segment.get_condition_segment_names()) diff --git a/splits.json b/splits.json new file mode 100644 index 00000000..67bd4fbe --- /dev/null +++ b/splits.json @@ -0,0 +1 @@ +{"ff": {"t": -1, "s": -1, "d": [{"changeNumber": 123, "trafficTypeName": "user", "name": "third_split", "trafficAllocation": 100, "trafficAllocationSeed": 123456, "seed": 321654, "status": "ACTIVE", "killed": true, "defaultTreatment": "off", "algo": 2, "conditions": [{"partitions": [{"treatment": "on", "size": 50}, {"treatment": "off", "size": 50}], "contitionType": "WHITELIST", "label": "some_label", "matcherGroup": {"matchers": [{"matcherType": "WHITELIST", "whitelistMatcherData": {"whitelist": ["k1", "k2", "k3"]}, "negate": false}], "combiner": "AND"}}, {"conditionType": "ROLLOUT", 
"matcherGroup": {"combiner": "AND", "matchers": [{"keySelector": {"trafficType": "user"}, "matcherType": "IN_RULE_BASED_SEGMENT", "negate": false, "userDefinedSegmentMatcherData": {"segmentName": "sample_rule_based_segment"}}]}, "partitions": [{"treatment": "on", "size": 100}, {"treatment": "off", "size": 0}], "label": "in rule based segment sample_rule_based_segment"}], "sets": ["set6"]}]}, "rbs": {"t": 1675095324253, "s": -1, "d": [{"changeNumber": 5, "name": "sample_rule_based_segment", "status": "ACTIVE", "trafficTypeName": "user", "excluded": {"keys": ["mauro@split.io", "gaston@split.io"], "segments": []}, "conditions": [{"matcherGroup": {"combiner": "AND", "matchers": [{"keySelector": {"trafficType": "user", "attribute": "email"}, "matcherType": "ENDS_WITH", "negate": false, "whitelistMatcherData": {"whitelist": ["@split.io"]}}]}}]}]}} \ No newline at end of file diff --git a/tests/api/test_splits_api.py b/tests/api/test_splits_api.py index af9819ea..bfb45c16 100644 --- a/tests/api/test_splits_api.py +++ b/tests/api/test_splits_api.py @@ -2,6 +2,7 @@ import pytest import unittest.mock as mock +import time from splitio.api import splits, client, APIException from splitio.api.commons import FetchOptions @@ -59,7 +60,69 @@ def raise_exception(*args, **kwargs): assert exc_info.type == APIException assert exc_info.value.message == 'some_message' + def test_old_spec(self, mocker): + """Test old split changes fetching API call.""" + httpclient = mocker.Mock(spec=client.HttpClient) + self.counter = 0 + self.query = [] + def get(sdk, splitChanges, sdk_key, extra_headers, query): + self.counter += 1 + self.query.append(query) + if self.counter == 1: + return client.HttpResponse(400, 'error', {}) + if self.counter == 2: + return client.HttpResponse(200, '{"splits": [], "since": 123, "till": 456}', {}) + + httpclient.get = get + split_api = splits.SplitsAPI(httpclient, 'some_api_key', SdkMetadata('1.0', 'some', '1.2.3.4'), mocker.Mock()) + + 
httpclient.is_sdk_endpoint_overridden.return_value = False + try: + response = split_api.fetch_splits(123, -1, FetchOptions(False, None, None, None)) + except Exception as e: + print(e) + + # no attempt to fetch old spec + assert self.query == [{'s': '1.3', 'since': 123, 'rbSince': -1}] + + httpclient.is_sdk_endpoint_overridden.return_value = True + self.query = [] + self.counter = 0 + response = split_api.fetch_splits(123, -1, FetchOptions(False, None, None, None)) + assert response == {"ff": {"d": [], "s": 123, "t": 456}, "rbs": {"d": [], "s": -1, "t": -1}} + assert self.query == [{'s': '1.3', 'since': 123, 'rbSince': -1}, {'s': '1.1', 'since': 123}] + assert not split_api.clear_storage + + def test_switch_to_new_spec(self, mocker): + """Test old split changes fetching API call.""" + httpclient = mocker.Mock(spec=client.HttpClient) + self.counter = 0 + self.query = [] + def get(sdk, splitChanges, sdk_key, extra_headers, query): + self.counter += 1 + self.query.append(query) + if self.counter == 1: + return client.HttpResponse(400, 'error', {}) + if self.counter == 2: + return client.HttpResponse(200, '{"splits": [], "since": 123, "till": 456}', {}) + if self.counter == 3: + return client.HttpResponse(200, '{"ff": {"d": [], "s": 123, "t": 456}, "rbs": {"d": [], "s": 123, "t": -1}}', {}) + + httpclient.is_sdk_endpoint_overridden.return_value = True + httpclient.get = get + split_api = splits.SplitsAPI(httpclient, 'some_api_key', SdkMetadata('1.0', 'some', '1.2.3.4'), mocker.Mock()) + response = split_api.fetch_splits(123, -1, FetchOptions(False, None, None, None)) + assert response == {"ff": {"d": [], "s": 123, "t": 456}, "rbs": {"d": [], "s": -1, "t": -1}} + assert self.query == [{'s': '1.3', 'since': 123, 'rbSince': -1}, {'s': '1.1', 'since': 123}] + assert not split_api.clear_storage + time.sleep(1) + splits._PROXY_CHECK_INTERVAL_MILLISECONDS_SS = 10 + response = split_api.fetch_splits(123, -1, FetchOptions(False, None, None, None)) + assert self.query[2] == 
{'s': '1.3', 'since': 123, 'rbSince': -1} + assert response == {"ff": {"d": [], "s": 123, "t": 456}, "rbs": {"d": [], "s": 123, "t": -1}} + assert split_api.clear_storage + class SplitAPIAsyncTests(object): """Split async API test cases.""" @@ -130,3 +193,69 @@ def raise_exception(*args, **kwargs): response = await split_api.fetch_splits(123, 12, FetchOptions()) assert exc_info.type == APIException assert exc_info.value.message == 'some_message' + + @pytest.mark.asyncio + async def test_old_spec(self, mocker): + """Test old split changes fetching API call.""" + httpclient = mocker.Mock(spec=client.HttpClientAsync) + self.counter = 0 + self.query = [] + async def get(sdk, splitChanges, sdk_key, extra_headers, query): + self.counter += 1 + self.query.append(query) + if self.counter == 1: + return client.HttpResponse(400, 'error', {}) + if self.counter == 2: + return client.HttpResponse(200, '{"splits": [], "since": 123, "till": 456}', {}) + + httpclient.is_sdk_endpoint_overridden.return_value = True + httpclient.get = get + split_api = splits.SplitsAPIAsync(httpclient, 'some_api_key', SdkMetadata('1.0', 'some', '1.2.3.4'), mocker.Mock()) + + httpclient.is_sdk_endpoint_overridden.return_value = False + try: + response = await split_api.fetch_splits(123, -1, FetchOptions(False, None, None, None)) + except Exception as e: + print(e) + + # no attempt to fetch old spec + assert self.query == [{'s': '1.3', 'since': 123, 'rbSince': -1}] + + httpclient.is_sdk_endpoint_overridden.return_value = True + self.query = [] + self.counter = 0 + response = await split_api.fetch_splits(123, -1, FetchOptions(False, None, None, None)) + assert response == {"ff": {"d": [], "s": 123, "t": 456}, "rbs": {"d": [], "s": -1, "t": -1}} + assert self.query == [{'s': '1.3', 'since': 123, 'rbSince': -1}, {'s': '1.1', 'since': 123}] + assert not split_api.clear_storage + + @pytest.mark.asyncio + async def test_switch_to_new_spec(self, mocker): + """Test old split changes fetching API call.""" + 
httpclient = mocker.Mock(spec=client.HttpClientAsync) + self.counter = 0 + self.query = [] + async def get(sdk, splitChanges, sdk_key, extra_headers, query): + self.counter += 1 + self.query.append(query) + if self.counter == 1: + return client.HttpResponse(400, 'error', {}) + if self.counter == 2: + return client.HttpResponse(200, '{"splits": [], "since": 123, "till": 456}', {}) + if self.counter == 3: + return client.HttpResponse(200, '{"ff": {"d": [], "s": 123, "t": 456}, "rbs": {"d": [], "s": 123, "t": -1}}', {}) + + httpclient.is_sdk_endpoint_overridden.return_value = True + httpclient.get = get + split_api = splits.SplitsAPIAsync(httpclient, 'some_api_key', SdkMetadata('1.0', 'some', '1.2.3.4'), mocker.Mock()) + response = await split_api.fetch_splits(123, -1, FetchOptions(False, None, None, None)) + assert response == {"ff": {"d": [], "s": 123, "t": 456}, "rbs": {"d": [], "s": -1, "t": -1}} + assert self.query == [{'s': '1.3', 'since': 123, 'rbSince': -1}, {'s': '1.1', 'since': 123}] + assert not split_api.clear_storage + + time.sleep(1) + splits._PROXY_CHECK_INTERVAL_MILLISECONDS_SS = 10 + response = await split_api.fetch_splits(123, -1, FetchOptions(False, None, None, None)) + assert self.query[2] == {'s': '1.3', 'since': 123, 'rbSince': -1} + assert response == {"ff": {"d": [], "s": 123, "t": 456}, "rbs": {"d": [], "s": 123, "t": -1}} + assert split_api.clear_storage diff --git a/tests/integration/files/split_changes_temp.json b/tests/integration/files/split_changes_temp.json index 64575226..24d876a4 100644 --- a/tests/integration/files/split_changes_temp.json +++ b/tests/integration/files/split_changes_temp.json @@ -1 +1 @@ -{"ff": {"t": -1, "s": -1, "d": [{"changeNumber": 10, "trafficTypeName": "user", "name": "rbs_feature_flag", "trafficAllocation": 100, "trafficAllocationSeed": 1828377380, "seed": -286617921, "status": "ACTIVE", "killed": false, "defaultTreatment": "off", "algo": 2, "conditions": [{"conditionType": "ROLLOUT", "matcherGroup": 
{"combiner": "AND", "matchers": [{"keySelector": {"trafficType": "user"}, "matcherType": "IN_RULE_BASED_SEGMENT", "negate": false, "userDefinedSegmentMatcherData": {"segmentName": "sample_rule_based_segment"}}]}, "partitions": [{"treatment": "on", "size": 100}, {"treatment": "off", "size": 0}], "label": "in rule based segment sample_rule_based_segment"}, {"conditionType": "ROLLOUT", "matcherGroup": {"combiner": "AND", "matchers": [{"keySelector": {"trafficType": "user"}, "matcherType": "ALL_KEYS", "negate": false}]}, "partitions": [{"treatment": "on", "size": 0}, {"treatment": "off", "size": 100}], "label": "default rule"}], "configurations": {}, "sets": [], "impressionsDisabled": false}]}, "rbs": {"t": 1675259356568, "s": -1, "d": [{"changeNumber": 5, "name": "sample_rule_based_segment", "status": "ACTIVE", "trafficTypeName": "user", "excluded": {"keys": ["mauro@split.io", "gaston@split.io"], "segments": []}, "conditions": [{"matcherGroup": {"combiner": "AND", "matchers": [{"keySelector": {"trafficType": "user", "attribute": "email"}, "matcherType": "ENDS_WITH", "negate": false, "whitelistMatcherData": {"whitelist": ["@split.io"]}}]}}]}]}} \ No newline at end of file +{"ff": {"t": -1, "s": -1, "d": [{"name": "SPLIT_1", "status": "ACTIVE", "killed": false, "defaultTreatment": "off", "configurations": {}, "conditions": []}]}, "rbs": {"t": -1, "s": -1, "d": [{"changeNumber": 12, "name": "some_segment", "status": "ACTIVE", "trafficTypeName": "user", "excluded": {"keys": [], "segments": []}, "conditions": []}]}} \ No newline at end of file diff --git a/tests/models/test_rule_based_segments.py b/tests/models/test_rule_based_segments.py index 9a822903..3ad36773 100644 --- a/tests/models/test_rule_based_segments.py +++ b/tests/models/test_rule_based_segments.py @@ -47,7 +47,7 @@ def test_from_raw(self): assert isinstance(parsed, rule_based_segments.RuleBasedSegment) assert parsed.change_number == 123 assert parsed.name == 'sample_rule_based_segment' - assert parsed.status 
== 'ACTIVE' + assert parsed.status == splits.Status.ACTIVE assert len(parsed.conditions) == 1 assert parsed.excluded.get_excluded_keys() == ["mauro@split.io","gaston@split.io"] assert parsed.excluded.get_excluded_segments() == [] diff --git a/tests/util/test_storage_helper.py b/tests/util/test_storage_helper.py index 7c9c04fc..ee5fe318 100644 --- a/tests/util/test_storage_helper.py +++ b/tests/util/test_storage_helper.py @@ -63,10 +63,16 @@ def intersect(sets): storage.flag_set_filter = flag_set_filter storage.flag_set_filter.flag_sets = {} - update_feature_flag_storage(storage, [split], 123) + self.clear = 0 + def clear(): + self.clear += 1 + storage.clear = clear + + update_feature_flag_storage(storage, [split], 123, True) assert self.added[0] == split assert self.deleted == [] assert self.change_number == 123 + assert self.clear == 1 class flag_set_filter2(): def should_filter(): @@ -76,9 +82,11 @@ def intersect(sets): storage.flag_set_filter = flag_set_filter2 storage.flag_set_filter.flag_sets = set({'set1', 'set2'}) + self.clear = 0 update_feature_flag_storage(storage, [split], 123) assert self.added == [] assert self.deleted[0] == split.name + assert self.clear == 0 class flag_set_filter3(): def should_filter(): @@ -167,12 +175,21 @@ def update(to_add, to_delete, change_number): self.deleted = to_delete self.change_number = change_number storage.update = update - + + self.clear = 0 + def clear(): + self.clear += 1 + storage.clear = clear + segments = update_rule_based_segment_storage(storage, [self.rbs], 123) assert self.added[0] == self.rbs assert self.deleted == [] assert self.change_number == 123 assert segments == {'excluded_segment', 'employees'} + assert self.clear == 0 + + segments = update_rule_based_segment_storage(storage, [self.rbs], 123, True) + assert self.clear == 1 @pytest.mark.asyncio async def test_update_rule_base_segment_storage_async(self, mocker): @@ -186,12 +203,20 @@ async def update(to_add, to_delete, change_number): self.change_number 
= change_number storage.update = update + self.clear = 0 + async def clear(): + self.clear += 1 + storage.clear = clear + segments = await update_rule_based_segment_storage_async(storage, [self.rbs], 123) assert self.added[0] == self.rbs assert self.deleted == [] assert self.change_number == 123 assert segments == {'excluded_segment', 'employees'} - + + segments = await update_rule_based_segment_storage_async(storage, [self.rbs], 123, True) + assert self.clear == 1 + @pytest.mark.asyncio async def test_update_feature_flag_storage_async(self, mocker): storage = mocker.Mock(spec=InMemorySplitStorageAsync) @@ -222,10 +247,16 @@ def intersect(sets): storage.flag_set_filter = flag_set_filter storage.flag_set_filter.flag_sets = {} - await update_feature_flag_storage_async(storage, [split], 123) + self.clear = 0 + async def clear(): + self.clear += 1 + storage.clear = clear + + await update_feature_flag_storage_async(storage, [split], 123, True) assert self.added[0] == split assert self.deleted == [] assert self.change_number == 123 + assert self.clear == 1 class flag_set_filter2(): def should_filter(): @@ -239,9 +270,11 @@ async def get(flag_name): return split storage.get = get + self.clear = 0 await update_feature_flag_storage_async(storage, [split], 123) assert self.added == [] assert self.deleted[0] == split.name + assert self.clear == 0 class flag_set_filter3(): def should_filter(): From d7b06a0f4f6cfce70df2b571e7e4561534bad8ea Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Fri, 2 May 2025 20:37:26 -0700 Subject: [PATCH 22/56] Added old spec for Localhost --- splitio/api/splits.py | 9 +- splitio/engine/evaluator.py | 35 ++++--- .../grammar/matchers/rule_based_segment.py | 18 +++- splitio/models/rule_based_segments.py | 36 +++++++- splitio/storage/inmemmory.py | 4 +- splitio/sync/split.py | 92 +++++++++++++------ splitio/sync/util.py | 4 + splitio/util/storage_helper.py | 7 +- tests/engine/test_evaluator.py | 16 ++-- 
.../integration/files/split_changes_temp.json | 2 +- tests/models/grammar/test_matchers.py | 4 +- tests/sync/test_splits_synchronizer.py | 29 +++++- tests/sync/test_synchronizer.py | 7 ++ tests/tasks/test_split_sync.py | 12 +++ tests/util/test_storage_helper.py | 2 +- 15 files changed, 206 insertions(+), 71 deletions(-) diff --git a/splitio/api/splits.py b/splitio/api/splits.py index 4de9204a..dcbb46f7 100644 --- a/splitio/api/splits.py +++ b/splitio/api/splits.py @@ -9,6 +9,7 @@ from splitio.models.telemetry import HTTPExceptionsAndLatencies from splitio.util.time import utctime_ms from splitio.spec import SPEC_VERSION +from splitio.sync import util _LOGGER = logging.getLogger(__name__) _SPEC_1_1 = "1.1" @@ -37,10 +38,6 @@ def __init__(self, client, sdk_key, sdk_metadata, telemetry_runtime_producer): self._last_proxy_check_timestamp = 0 self.clear_storage = False - def _convert_to_new_spec(self, body): - return {"ff": {"d": body["splits"], "s": body["since"], "t": body["till"]}, - "rbs": {"d": [], "s": -1, "t": -1}} - def _check_last_proxy_check_timestamp(self): if self._spec_version == _SPEC_1_1 and ((utctime_ms() - self._last_proxy_check_timestamp) >= _PROXY_CHECK_INTERVAL_MILLISECONDS_SS): _LOGGER.info("Switching to new Feature flag spec (%s) and fetching.", SPEC_VERSION); @@ -91,7 +88,7 @@ def fetch_splits(self, change_number, rbs_change_number, fetch_options): ) if 200 <= response.status_code < 300: if self._spec_version == _SPEC_1_1: - return self._convert_to_new_spec(json.loads(response.body)) + return util.convert_to_new_spec(json.loads(response.body)) self.clear_storage = self._last_proxy_check_timestamp != 0 self._last_proxy_check_timestamp = 0 @@ -159,7 +156,7 @@ async def fetch_splits(self, change_number, rbs_change_number, fetch_options): ) if 200 <= response.status_code < 300: if self._spec_version == _SPEC_1_1: - return self._convert_to_new_spec(json.loads(response.body)) + return util.convert_to_new_spec(json.loads(response.body)) 
self.clear_storage = self._last_proxy_check_timestamp != 0 self._last_proxy_check_timestamp = 0 diff --git a/splitio/engine/evaluator.py b/splitio/engine/evaluator.py index 3bd11512..45544d3d 100644 --- a/splitio/engine/evaluator.py +++ b/splitio/engine/evaluator.py @@ -6,11 +6,12 @@ from splitio.models.grammar.condition import ConditionType from splitio.models.grammar.matchers.misc import DependencyMatcher from splitio.models.grammar.matchers.keys import UserDefinedSegmentMatcher -from splitio.models.grammar.matchers.rule_based_segment import RuleBasedSegmentMatcher +from splitio.models.grammar.matchers import RuleBasedSegmentMatcher +from splitio.models.rule_based_segments import SegmentType from splitio.optional.loaders import asyncio CONTROL = 'control' -EvaluationContext = namedtuple('EvaluationContext', ['flags', 'segment_memberships', 'segment_rbs_memberships', 'segment_rbs_conditions']) +EvaluationContext = namedtuple('EvaluationContext', ['flags', 'segment_memberships', 'segment_rbs_memberships', 'segment_rbs_conditions', 'excluded_rbs_segments']) _LOGGER = logging.getLogger(__name__) @@ -130,6 +131,7 @@ def context_for(self, key, feature_names): rbs_segment_memberships = {} rbs_segment_conditions = {} + excluded_rbs_segments = {} key_membership = False segment_memberhsip = False for rbs_segment in pending_rbs_memberships: @@ -138,10 +140,14 @@ def context_for(self, key, feature_names): key_membership = key in rbs_segment_obj.excluded.get_excluded_keys() segment_memberhsip = False - for segment_name in rbs_segment_obj.excluded.get_excluded_segments(): - if self._segment_storage.segment_contains(segment_name, key): + for excluded_segment in rbs_segment_obj.excluded.get_excluded_segments(): + if excluded_segment.type == SegmentType.STANDARD and self._segment_storage.segment_contains(excluded_segment.name, key): segment_memberhsip = True - break + + if excluded_segment.type == SegmentType.RULE_BASED: + rbs_segment = 
self._rbs_segment_storage.get(excluded_segment.name) + if rbs_segment is not None: + excluded_rbs_segments.update() rbs_segment_memberships.update({rbs_segment: segment_memberhsip or key_membership}) if not (segment_memberhsip or key_membership): @@ -153,7 +159,8 @@ def context_for(self, key, feature_names): for segment in pending_memberships }, rbs_segment_memberships, - rbs_segment_conditions + rbs_segment_conditions, + excluded_rbs_segments ) class AsyncEvaluationDataFactory: @@ -189,6 +196,7 @@ async def context_for(self, key, feature_names): rbs_segment_memberships = {} rbs_segment_conditions = {} + excluded_rbs_segments = {} key_membership = False segment_memberhsip = False for rbs_segment in pending_rbs_memberships: @@ -197,11 +205,15 @@ async def context_for(self, key, feature_names): key_membership = key in rbs_segment_obj.excluded.get_excluded_keys() segment_memberhsip = False - for segment_name in rbs_segment_obj.excluded.get_excluded_segments(): - if await self._segment_storage.segment_contains(segment_name, key): + for excluded_segment in rbs_segment_obj.excluded.get_excluded_segments(): + if excluded_segment.type == SegmentType.STANDARD and await self._segment_storage.segment_contains(excluded_segment.name, key): segment_memberhsip = True - break - + + if excluded_segment.type == SegmentType.RULE_BASED: + rbs_segment = await self._rbs_segment_storage.get(excluded_segment.name) + if rbs_segment is not None: + excluded_rbs_segments.update() + rbs_segment_memberships.update({rbs_segment: segment_memberhsip or key_membership}) if not (segment_memberhsip or key_membership): rbs_segment_conditions.update({rbs_segment: [condition for condition in rbs_segment_obj.conditions]}) @@ -215,7 +227,8 @@ async def context_for(self, key, feature_names): splits, dict(zip(segment_names, segment_memberships)), rbs_segment_memberships, - rbs_segment_conditions + rbs_segment_conditions, + excluded_rbs_segments ) diff --git 
a/splitio/models/grammar/matchers/rule_based_segment.py b/splitio/models/grammar/matchers/rule_based_segment.py index 0e0aa665..88e84f9c 100644 --- a/splitio/models/grammar/matchers/rule_based_segment.py +++ b/splitio/models/grammar/matchers/rule_based_segment.py @@ -32,11 +32,14 @@ def _match(self, key, attributes=None, context=None): # Check if rbs segment has exclusions if context['ec'].segment_rbs_memberships.get(self._rbs_segment_name): return False - - for parsed_condition in context['ec'].segment_rbs_conditions.get(self._rbs_segment_name): - if parsed_condition.matches(key, attributes, context): + + for rbs_segment in context['ec'].excluded_rbs_segments: + if self._match_conditions(rbs_segment, key, attributes, context): return True - + + if self._match_conditions(context['ec'].segment_rbs_conditions.get(self._rbs_segment_name), key, attributes, context): + return True + return False def _add_matcher_specific_properties_to_json(self): @@ -45,4 +48,9 @@ def _add_matcher_specific_properties_to_json(self): 'userDefinedSegmentMatcherData': { 'segmentName': self._rbs_segment_name } - } \ No newline at end of file + } + + def _match_conditions(self, rbs_segment, key, attributes, context): + for parsed_condition in rbs_segment: + if parsed_condition.matches(key, attributes, context): + return True diff --git a/splitio/models/rule_based_segments.py b/splitio/models/rule_based_segments.py index 5914983c..c2f1a6f1 100644 --- a/splitio/models/rule_based_segments.py +++ b/splitio/models/rule_based_segments.py @@ -1,5 +1,6 @@ """RuleBasedSegment module.""" +from enum import Enum import logging from splitio.models import MatcherNotFoundException @@ -9,6 +10,12 @@ _LOGGER = logging.getLogger(__name__) +class SegmentType(Enum): + """Segment type.""" + + STANDARD = "standard" + RULE_BASED = "rule-based" + class RuleBasedSegment(object): """RuleBasedSegment object class.""" @@ -125,7 +132,7 @@ def __init__(self, keys, segments): :type segments: List """ self._keys = keys - 
self._segments = segments + self._segments = [ExcludedSegment(segment['name'], segment['type']) for segment in segments] def get_excluded_keys(self): """Return excluded keys.""" @@ -141,3 +148,30 @@ def to_json(self): 'keys': self._keys, 'segments': self._segments } + +class ExcludedSegment(object): + + def __init__(self, name, type): + """ + Class constructor. + + :param name: rule based segment name + :type name: str + :param type: segment type + :type type: str + """ + self._name = name + try: + self._type = SegmentType(type) + except ValueError: + self._type = SegmentType.STANDARD + + @property + def name(self): + """Return name.""" + return self._name + + @property + def type(self): + """Return type.""" + return self._type diff --git a/splitio/storage/inmemmory.py b/splitio/storage/inmemmory.py index 817e7d86..9f215eed 100644 --- a/splitio/storage/inmemmory.py +++ b/splitio/storage/inmemmory.py @@ -243,7 +243,7 @@ async def clear(self): """ Clear storage """ - with self._lock: + async with self._lock: self._rule_based_segments = {} self._change_number = -1 @@ -702,7 +702,7 @@ async def clear(self): """ Clear storage """ - with self._lock: + async with self._lock: self._feature_flags = {} self._change_number = -1 self._traffic_types = Counter() diff --git a/splitio/sync/split.py b/splitio/sync/split.py index 3a16068b..dfc58811 100644 --- a/splitio/sync/split.py +++ b/splitio/sync/split.py @@ -15,6 +15,7 @@ from splitio.util.time import get_current_epoch_time_ms from splitio.util.storage_helper import update_feature_flag_storage, update_feature_flag_storage_async, \ update_rule_based_segment_storage, update_rule_based_segment_storage_async + from splitio.sync import util from splitio.optional.loaders import asyncio, aiofiles @@ -392,6 +393,25 @@ class LocalSplitSynchronizerBase(object): """Localhost mode feature_flag base synchronizer.""" _DEFAULT_FEATURE_FLAG_TILL = -1 + _DEFAULT_RB_SEGMENT_TILL = -1 + + def __init__(self, filename, feature_flag_storage, 
rule_based_segment_storage, localhost_mode=LocalhostMode.LEGACY): + """ + Class constructor. + + :param filename: File to parse feature flags from. + :type filename: str + :param feature_flag_storage: Feature flag Storage. + :type feature_flag_storage: splitio.storage.InMemorySplitStorage + :param localhost_mode: mode for localhost either JSON, YAML or LEGACY. + :type localhost_mode: splitio.sync.split.LocalhostMode + """ + self._filename = filename + self._feature_flag_storage = feature_flag_storage + self._rule_based_segment_storage = rule_based_segment_storage + self._localhost_mode = localhost_mode + self._current_ff_sha = "-1" + self._current_rbs_sha = "-1" @staticmethod def _make_feature_flag(feature_flag_name, conditions, configs=None): @@ -541,7 +561,7 @@ def _sanitize_rb_segment_elements(self, parsed_rb_segments): _LOGGER.warning("A rule based segment in json file does not have (Name) or property is empty, skipping.") continue for element in [('trafficTypeName', 'user', None, None, None, None), - ('status', splits.Status.ACTIVE, None, None, [splits.Status.ACTIVE, splits.Status.ARCHIVED], None), + ('status', splits.Status.ACTIVE.value, None, None, [e.value for e in splits.Status], None), ('changeNumber', 0, 0, None, None, None)]: rb_segment = util._sanitize_object_element(rb_segment, 'rule based segment', element[0], element[1], lower_value=element[2], upper_value=element[3], in_list=element[4], not_in_list=element[5]) rb_segment = self._sanitize_condition(rb_segment) @@ -632,6 +652,9 @@ def _convert_yaml_to_feature_flag(cls, parsed): to_return[feature_flag_name] = cls._make_feature_flag(feature_flag_name, whitelist + all_keys, configs) return to_return + def _check_exit_conditions(self, storage_cn, parsed_till, default_till): + if storage_cn > parsed_till and parsed_till != default_till: + return True class LocalSplitSynchronizer(LocalSplitSynchronizerBase): """Localhost mode feature_flag synchronizer.""" @@ -647,12 +670,8 @@ def __init__(self, filename, 
feature_flag_storage, rule_based_segment_storage, l :param localhost_mode: mode for localhost either JSON, YAML or LEGACY. :type localhost_mode: splitio.sync.split.LocalhostMode """ - self._filename = filename - self._feature_flag_storage = feature_flag_storage - self._rule_based_segment_storage = rule_based_segment_storage - self._localhost_mode = localhost_mode - self._current_json_sha = "-1" - + LocalSplitSynchronizerBase.__init__(self, filename, feature_flag_storage, rule_based_segment_storage, localhost_mode) + @classmethod def _read_feature_flags_from_legacy_file(cls, filename): """ @@ -744,18 +763,24 @@ def _synchronize_json(self): try: parsed = self._read_feature_flags_from_json_file(self._filename) segment_list = set() - fecthed_sha = util._get_sha(json.dumps(parsed)) - if fecthed_sha == self._current_json_sha: + fecthed_ff_sha = util._get_sha(json.dumps(parsed['ff'])) + fecthed_rbs_sha = util._get_sha(json.dumps(parsed['rbs'])) + + if fecthed_ff_sha == self._current_ff_sha and fecthed_rbs_sha == self._current_rbs_sha: return [] - self._current_json_sha = fecthed_sha - if self._feature_flag_storage.get_change_number() > parsed['ff']['t'] and parsed['ff']['t'] != self._DEFAULT_FEATURE_FLAG_TILL: + self._current_ff_sha = fecthed_ff_sha + self._current_rbs_sha = fecthed_rbs_sha + + if self._check_exit_conditions(self._feature_flag_storage.get_change_number(), parsed['ff']['t'], self._DEFAULT_FEATURE_FLAG_TILL) \ + and self._check_exit_conditions(self._rule_based_segment_storage.get_change_number(), parsed['rbs']['t'], self._DEFAULT_RB_SEGMENT_TILL): return [] - fetched_feature_flags = [splits.from_raw(feature_flag) for feature_flag in parsed['ff']['d']] - segment_list = update_feature_flag_storage(self._feature_flag_storage, fetched_feature_flags, parsed['ff']['t']) + if not self._check_exit_conditions(self._feature_flag_storage.get_change_number(), parsed['ff']['t'], self._DEFAULT_FEATURE_FLAG_TILL): + fetched_feature_flags = [splits.from_raw(feature_flag) 
for feature_flag in parsed['ff']['d']] + segment_list = update_feature_flag_storage(self._feature_flag_storage, fetched_feature_flags, parsed['ff']['t']) - if self._rule_based_segment_storage.get_change_number() <= parsed['rbs']['t'] or parsed['rbs']['t'] == self._DEFAULT_FEATURE_FLAG_TILL: + if not self._check_exit_conditions(self._rule_based_segment_storage.get_change_number(), parsed['rbs']['t'], self._DEFAULT_RB_SEGMENT_TILL): fetched_rb_segments = [rule_based_segments.from_raw(rb_segment) for rb_segment in parsed['rbs']['d']] segment_list.update(update_rule_based_segment_storage(self._rule_based_segment_storage, fetched_rb_segments, parsed['rbs']['t'])) @@ -763,7 +788,7 @@ def _synchronize_json(self): except Exception as exc: _LOGGER.debug('Exception: ', exc_info=True) - raise ValueError("Error reading feature flags from json.") from exc + raise ValueError("Error reading feature flags from json.") from exc def _read_feature_flags_from_json_file(self, filename): """ @@ -778,6 +803,11 @@ def _read_feature_flags_from_json_file(self, filename): try: with open(filename, 'r') as flo: parsed = json.load(flo) + + # check if spec version is old + if parsed.get('splits'): + parsed = util.convert_to_new_spec(parsed) + santitized = self._sanitize_json_elements(parsed) santitized['ff']['d'] = self._sanitize_feature_flag_elements(santitized['ff']['d']) santitized['rbs']['d'] = self._sanitize_rb_segment_elements(santitized['rbs']['d']) @@ -787,7 +817,6 @@ def _read_feature_flags_from_json_file(self, filename): _LOGGER.debug('Exception: ', exc_info=True) raise ValueError("Error parsing file %s. Make sure it's readable." % filename) from exc - class LocalSplitSynchronizerAsync(LocalSplitSynchronizerBase): """Localhost mode async feature_flag synchronizer.""" @@ -802,11 +831,7 @@ def __init__(self, filename, feature_flag_storage, rule_based_segment_storage, l :param localhost_mode: mode for localhost either JSON, YAML or LEGACY. 
:type localhost_mode: splitio.sync.split.LocalhostMode """ - self._filename = filename - self._feature_flag_storage = feature_flag_storage - self._rule_based_segment_storage = rule_based_segment_storage - self._localhost_mode = localhost_mode - self._current_json_sha = "-1" + LocalSplitSynchronizerBase.__init__(self, filename, feature_flag_storage, rule_based_segment_storage, localhost_mode) @classmethod async def _read_feature_flags_from_legacy_file(cls, filename): @@ -900,18 +925,24 @@ async def _synchronize_json(self): try: parsed = await self._read_feature_flags_from_json_file(self._filename) segment_list = set() - fecthed_sha = util._get_sha(json.dumps(parsed)) - if fecthed_sha == self._current_json_sha: + fecthed_ff_sha = util._get_sha(json.dumps(parsed['ff'])) + fecthed_rbs_sha = util._get_sha(json.dumps(parsed['rbs'])) + + if fecthed_ff_sha == self._current_ff_sha and fecthed_rbs_sha == self._current_rbs_sha: return [] - self._current_json_sha = fecthed_sha - if await self._feature_flag_storage.get_change_number() > parsed['ff']['t'] and parsed['ff']['t'] != self._DEFAULT_FEATURE_FLAG_TILL: + self._current_ff_sha = fecthed_ff_sha + self._current_rbs_sha = fecthed_rbs_sha + + if self._check_exit_conditions(await self._feature_flag_storage.get_change_number(), parsed['ff']['t'], self._DEFAULT_FEATURE_FLAG_TILL) \ + and self._check_exit_conditions(await self._rule_based_segment_storage.get_change_number(), parsed['rbs']['t'], self._DEFAULT_RB_SEGMENT_TILL): return [] - fetched_feature_flags = [splits.from_raw(feature_flag) for feature_flag in parsed['ff']['d']] - segment_list = await update_feature_flag_storage_async(self._feature_flag_storage, fetched_feature_flags, parsed['ff']['t']) + if not self._check_exit_conditions(await self._feature_flag_storage.get_change_number(), parsed['ff']['t'], self._DEFAULT_FEATURE_FLAG_TILL): + fetched_feature_flags = [splits.from_raw(feature_flag) for feature_flag in parsed['ff']['d']] + segment_list = await 
update_feature_flag_storage_async(self._feature_flag_storage, fetched_feature_flags, parsed['ff']['t']) - if await self._rule_based_segment_storage.get_change_number() <= parsed['rbs']['t'] or parsed['rbs']['t'] == self._DEFAULT_FEATURE_FLAG_TILL: + if not self._check_exit_conditions(await self._rule_based_segment_storage.get_change_number(), parsed['rbs']['t'], self._DEFAULT_RB_SEGMENT_TILL): fetched_rb_segments = [rule_based_segments.from_raw(rb_segment) for rb_segment in parsed['rbs']['d']] segment_list.update(await update_rule_based_segment_storage_async(self._rule_based_segment_storage, fetched_rb_segments, parsed['rbs']['t'])) @@ -934,6 +965,11 @@ async def _read_feature_flags_from_json_file(self, filename): try: async with aiofiles.open(filename, 'r') as flo: parsed = json.loads(await flo.read()) + + # check if spec version is old + if parsed.get('splits'): + parsed = util.convert_to_new_spec(parsed) + santitized = self._sanitize_json_elements(parsed) santitized['ff']['d'] = self._sanitize_feature_flag_elements(santitized['ff']['d']) santitized['rbs']['d'] = self._sanitize_rb_segment_elements(santitized['rbs']['d']) diff --git a/splitio/sync/util.py b/splitio/sync/util.py index 07ec5f24..cd32d2c2 100644 --- a/splitio/sync/util.py +++ b/splitio/sync/util.py @@ -62,3 +62,7 @@ def _sanitize_object_element(object, object_name, element_name, default_value, l _LOGGER.debug("Sanitized element [%s] to '%s' in %s: %s.", element_name, default_value, object_name, object['name']) return object + +def convert_to_new_spec(body): + return {"ff": {"d": body["splits"], "s": body["since"], "t": body["till"]}, + "rbs": {"d": [], "s": -1, "t": -1}} diff --git a/splitio/util/storage_helper.py b/splitio/util/storage_helper.py index d1c37f92..ad5d93eb 100644 --- a/splitio/util/storage_helper.py +++ b/splitio/util/storage_helper.py @@ -58,7 +58,7 @@ def update_rule_based_segment_storage(rule_based_segment_storage, rule_based_seg for rule_based_segment in rule_based_segments: if 
rule_based_segment.status == splits.Status.ACTIVE: to_add.append(rule_based_segment) - segment_list.update(set(rule_based_segment.excluded.get_excluded_segments())) + segment_list.update(set(_get_segment_names(rule_based_segment.excluded.get_excluded_segments()))) segment_list.update(rule_based_segment.get_condition_segment_names()) else: if rule_based_segment_storage.get(rule_based_segment.name) is not None: @@ -67,6 +67,9 @@ def update_rule_based_segment_storage(rule_based_segment_storage, rule_based_seg rule_based_segment_storage.update(to_add, to_delete, change_number) return segment_list +def _get_segment_names(excluded_segments): + return [excluded_segment.name for excluded_segment in excluded_segments] + async def update_feature_flag_storage_async(feature_flag_storage, feature_flags, change_number, clear_storage=False): """ Update feature flag storage from given list of feature flags while checking the flag set logic @@ -121,7 +124,7 @@ async def update_rule_based_segment_storage_async(rule_based_segment_storage, ru for rule_based_segment in rule_based_segments: if rule_based_segment.status == splits.Status.ACTIVE: to_add.append(rule_based_segment) - segment_list.update(set(rule_based_segment.excluded.get_excluded_segments())) + segment_list.update(set(_get_segment_names(rule_based_segment.excluded.get_excluded_segments()))) segment_list.update(rule_based_segment.get_condition_segment_names()) else: if await rule_based_segment_storage.get(rule_based_segment.name) is not None: diff --git a/tests/engine/test_evaluator.py b/tests/engine/test_evaluator.py index de8f9325..6d160a9e 100644 --- a/tests/engine/test_evaluator.py +++ b/tests/engine/test_evaluator.py @@ -124,7 +124,7 @@ def test_evaluate_treatment_killed_split(self, mocker): mocked_split.killed = True mocked_split.change_number = 123 mocked_split.get_configurations_for.return_value = '{"some_property": 123}' - ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), 
segment_rbs_memberships={}, segment_rbs_conditions={}) + ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={}, segment_rbs_conditions={}, excluded_rbs_segments={}) result = e.eval_with_context('some_key', 'some_bucketing_key', 'some', {}, ctx) assert result['treatment'] == 'off' assert result['configurations'] == '{"some_property": 123}' @@ -142,7 +142,7 @@ def test_evaluate_treatment_ok(self, mocker): mocked_split.killed = False mocked_split.change_number = 123 mocked_split.get_configurations_for.return_value = '{"some_property": 123}' - ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={}, segment_rbs_conditions={}) + ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={}, segment_rbs_conditions={}, excluded_rbs_segments={}) result = e.eval_with_context('some_key', 'some_bucketing_key', 'some', {}, ctx) assert result['treatment'] == 'on' assert result['configurations'] == '{"some_property": 123}' @@ -161,7 +161,7 @@ def test_evaluate_treatment_ok_no_config(self, mocker): mocked_split.killed = False mocked_split.change_number = 123 mocked_split.get_configurations_for.return_value = None - ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={}, segment_rbs_conditions={}) + ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={}, segment_rbs_conditions={}, excluded_rbs_segments={}) result = e.eval_with_context('some_key', 'some_bucketing_key', 'some', {}, ctx) assert result['treatment'] == 'on' assert result['configurations'] == None @@ -188,7 +188,7 @@ def test_evaluate_treatments(self, mocker): mocked_split2.change_number = 123 mocked_split2.get_configurations_for.return_value = None - ctx = EvaluationContext(flags={'feature2': mocked_split, 'feature4': mocked_split2}, segment_memberships=set(), 
segment_rbs_memberships={}, segment_rbs_conditions={}) + ctx = EvaluationContext(flags={'feature2': mocked_split, 'feature4': mocked_split2}, segment_memberships=set(), segment_rbs_memberships={}, segment_rbs_conditions={}, excluded_rbs_segments={}) results = e.eval_many_with_context('some_key', 'some_bucketing_key', ['feature2', 'feature4'], {}, ctx) result = results['feature4'] assert result['configurations'] == None @@ -211,7 +211,7 @@ def test_get_gtreatment_for_split_no_condition_matches(self, mocker): mocked_split.change_number = '123' mocked_split.conditions = [] mocked_split.get_configurations_for = None - ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={}, segment_rbs_conditions={}) + ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={}, segment_rbs_conditions={}, excluded_rbs_segments={}) assert e._treatment_for_flag(mocked_split, 'some_key', 'some_bucketing', {}, ctx) == ( 'off', Label.NO_CONDITION_MATCHED @@ -228,7 +228,7 @@ def test_get_gtreatment_for_split_non_rollout(self, mocker): mocked_split = mocker.Mock(spec=Split) mocked_split.killed = False mocked_split.conditions = [mocked_condition_1] - treatment, label = e._treatment_for_flag(mocked_split, 'some_key', 'some_bucketing', {}, EvaluationContext(None, None, None, None)) + treatment, label = e._treatment_for_flag(mocked_split, 'some_key', 'some_bucketing', {}, EvaluationContext(None, None, None, None, None)) assert treatment == 'on' assert label == 'some_label' @@ -237,11 +237,11 @@ def test_evaluate_treatment_with_rule_based_segment(self, mocker): e = evaluator.Evaluator(splitters.Splitter()) mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False) - ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={'sample_rule_based_segment': False}, 
segment_rbs_conditions={'sample_rule_based_segment': rule_based_segments.from_raw(rbs_raw).conditions}) + ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={'sample_rule_based_segment': False}, segment_rbs_conditions={'sample_rule_based_segment': rule_based_segments.from_raw(rbs_raw).conditions}, excluded_rbs_segments={}) result = e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx) assert result['treatment'] == 'on' - ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={'sample_rule_based_segment': True}, segment_rbs_conditions={'sample_rule_based_segment': []}) + ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={'sample_rule_based_segment': True}, segment_rbs_conditions={'sample_rule_based_segment': []}, excluded_rbs_segments={}) result = e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx) assert result['treatment'] == 'off' diff --git a/tests/integration/files/split_changes_temp.json b/tests/integration/files/split_changes_temp.json index 24d876a4..64575226 100644 --- a/tests/integration/files/split_changes_temp.json +++ b/tests/integration/files/split_changes_temp.json @@ -1 +1 @@ -{"ff": {"t": -1, "s": -1, "d": [{"name": "SPLIT_1", "status": "ACTIVE", "killed": false, "defaultTreatment": "off", "configurations": {}, "conditions": []}]}, "rbs": {"t": -1, "s": -1, "d": [{"changeNumber": 12, "name": "some_segment", "status": "ACTIVE", "trafficTypeName": "user", "excluded": {"keys": [], "segments": []}, "conditions": []}]}} \ No newline at end of file +{"ff": {"t": -1, "s": -1, "d": [{"changeNumber": 10, "trafficTypeName": "user", "name": "rbs_feature_flag", "trafficAllocation": 100, "trafficAllocationSeed": 1828377380, "seed": -286617921, "status": "ACTIVE", "killed": false, "defaultTreatment": "off", "algo": 2, 
"conditions": [{"conditionType": "ROLLOUT", "matcherGroup": {"combiner": "AND", "matchers": [{"keySelector": {"trafficType": "user"}, "matcherType": "IN_RULE_BASED_SEGMENT", "negate": false, "userDefinedSegmentMatcherData": {"segmentName": "sample_rule_based_segment"}}]}, "partitions": [{"treatment": "on", "size": 100}, {"treatment": "off", "size": 0}], "label": "in rule based segment sample_rule_based_segment"}, {"conditionType": "ROLLOUT", "matcherGroup": {"combiner": "AND", "matchers": [{"keySelector": {"trafficType": "user"}, "matcherType": "ALL_KEYS", "negate": false}]}, "partitions": [{"treatment": "on", "size": 0}, {"treatment": "off", "size": 100}], "label": "default rule"}], "configurations": {}, "sets": [], "impressionsDisabled": false}]}, "rbs": {"t": 1675259356568, "s": -1, "d": [{"changeNumber": 5, "name": "sample_rule_based_segment", "status": "ACTIVE", "trafficTypeName": "user", "excluded": {"keys": ["mauro@split.io", "gaston@split.io"], "segments": []}, "conditions": [{"matcherGroup": {"combiner": "AND", "matchers": [{"keySelector": {"trafficType": "user", "attribute": "email"}, "matcherType": "ENDS_WITH", "negate": false, "whitelistMatcherData": {"whitelist": ["@split.io"]}}]}}]}]}} \ No newline at end of file diff --git a/tests/models/grammar/test_matchers.py b/tests/models/grammar/test_matchers.py index 12de99e8..d4d09aae 100644 --- a/tests/models/grammar/test_matchers.py +++ b/tests/models/grammar/test_matchers.py @@ -404,9 +404,9 @@ def test_matcher_behaviour(self, mocker): matcher = matchers.UserDefinedSegmentMatcher(self.raw) # Test that if the key if the storage wrapper finds the key in the segment, it matches. 
- assert matcher.evaluate('some_key', {}, {'evaluator': None, 'ec': EvaluationContext([],{'some_segment': True}, {}, {})}) is True + assert matcher.evaluate('some_key', {}, {'evaluator': None, 'ec': EvaluationContext([],{'some_segment': True}, {}, {}, {})}) is True # Test that if the key if the storage wrapper doesn't find the key in the segment, it fails. - assert matcher.evaluate('some_key', {}, {'evaluator': None, 'ec': EvaluationContext([], {'some_segment': False}, {}, {})}) is False + assert matcher.evaluate('some_key', {}, {'evaluator': None, 'ec': EvaluationContext([], {'some_segment': False}, {}, {}, {})}) is False def test_to_json(self): """Test that the object serializes to JSON properly.""" diff --git a/tests/sync/test_splits_synchronizer.py b/tests/sync/test_splits_synchronizer.py index 3afb1f0d..c0ea38fb 100644 --- a/tests/sync/test_splits_synchronizer.py +++ b/tests/sync/test_splits_synchronizer.py @@ -499,7 +499,7 @@ class SplitsSynchronizerAsyncTests(object): async def test_synchronize_splits_error(self, mocker): """Test that if fetching splits fails at some_point, the task will continue running.""" storage = mocker.Mock(spec=InMemorySplitStorageAsync) - rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorageAsync) api = mocker.Mock() async def run(x, y, c): @@ -531,7 +531,7 @@ def intersect(sets): async def test_synchronize_splits(self, mocker): """Test split sync.""" storage = mocker.Mock(spec=InMemorySplitStorageAsync) - rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorageAsync) async def change_number_mock(): change_number_mock._calls += 1 @@ -571,6 +571,16 @@ async def update(parsed_rbs, deleted, chanhe_number): self.parsed_rbs = parsed_rbs rbs_storage.update = update + self.clear = False + async def clear(): + self.clear = True + storage.clear = clear + + self.clear2 = False + async def clear(): + 
self.clear2 = True + rbs_storage.clear = clear + api = mocker.Mock() self.change_number_1 = None self.fetch_options_1 = None @@ -599,6 +609,7 @@ async def get_changes(change_number, rbs_change_number, fetch_options): } get_changes.called = 0 api.fetch_splits = get_changes + api.clear_storage.return_value = False split_synchronizer = SplitSynchronizerAsync(api, storage, rbs_storage) await split_synchronizer.synchronize_splits() @@ -618,7 +629,7 @@ async def get_changes(change_number, rbs_change_number, fetch_options): async def test_not_called_on_till(self, mocker): """Test that sync is not called when till is less than previous changenumber""" storage = mocker.Mock(spec=InMemorySplitStorageAsync) - rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorageAsync) class flag_set_filter(): def should_filter(): @@ -651,7 +662,7 @@ async def test_synchronize_splits_cdn(self, mocker): """Test split sync with bypassing cdn.""" mocker.patch('splitio.sync.split._ON_DEMAND_FETCH_BACKOFF_MAX_RETRIES', new=3) storage = mocker.Mock(spec=InMemorySplitStorageAsync) - rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorageAsync) async def change_number_mock(): change_number_mock._calls += 1 if change_number_mock._calls == 1: @@ -741,6 +752,16 @@ def intersect(sets): storage.flag_set_filter.flag_sets = {} storage.flag_set_filter.sorted_flag_sets = [] + self.clear = False + async def clear(): + self.clear = True + storage.clear = clear + + self.clear2 = False + async def clear(): + self.clear2 = True + rbs_storage.clear = clear + split_synchronizer = SplitSynchronizerAsync(api, storage, rbs_storage) split_synchronizer._backoff = Backoff(1, 1) await split_synchronizer.synchronize_splits() diff --git a/tests/sync/test_synchronizer.py b/tests/sync/test_synchronizer.py index 42985e4c..6c850dd5 100644 --- a/tests/sync/test_synchronizer.py +++ 
b/tests/sync/test_synchronizer.py @@ -203,6 +203,13 @@ def intersect(sets): mocker.Mock(), mocker.Mock()) synchronizer = Synchronizer(split_synchronizers, mocker.Mock(spec=SplitTasks)) +# pytest.set_trace() + self.clear = False + def clear(): + self.clear = True + split_storage.clear = clear + rbs_storage.clear = clear + synchronizer.sync_all() inserted_split = split_storage.update.mock_calls[0][1][0][0] diff --git a/tests/tasks/test_split_sync.py b/tests/tasks/test_split_sync.py index c1ec3620..c9a0c692 100644 --- a/tests/tasks/test_split_sync.py +++ b/tests/tasks/test_split_sync.py @@ -73,6 +73,12 @@ def intersect(sets): storage.flag_set_filter.flag_sets = {} storage.flag_set_filter.sorted_flag_sets = [] + self.clear = False + def clear(): + self.clear = True + storage.clear = clear + rbs_storage.clear = clear + api = mocker.Mock() def get_changes(*args, **kwargs): @@ -172,6 +178,12 @@ async def set_change_number(*_): pass change_number_mock._calls = 0 storage.set_change_number = set_change_number + + self.clear = False + async def clear(): + self.clear = True + storage.clear = clear + rbs_storage.clear = clear api = mocker.Mock() self.change_number = [] diff --git a/tests/util/test_storage_helper.py b/tests/util/test_storage_helper.py index ee5fe318..1dab0d01 100644 --- a/tests/util/test_storage_helper.py +++ b/tests/util/test_storage_helper.py @@ -18,7 +18,7 @@ class StorageHelperTests(object): "trafficTypeName": "user", "excluded":{ "keys":["mauro@split.io","gaston@split.io"], - "segments":['excluded_segment'] + "segments":[{"name":"excluded_segment", "type": "standard"}] }, "conditions": [ {"matcherGroup": { From 5530baa0135704dbb41886934b6c07a6a54e7e89 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Mon, 5 May 2025 14:12:32 -0700 Subject: [PATCH 23/56] polish and integration tests --- splitio/api/client.py | 2 +- splitio/engine/evaluator.py | 12 +- .../grammar/matchers/rule_based_segment.py | 6 +- splitio/models/rule_based_segments.py | 10 + 
splits.json | 1 - tests/engine/files/rule_base_segments.json | 61 +++ tests/engine/files/rule_base_segments2.json | 63 +++ tests/engine/files/rule_base_segments3.json | 35 ++ tests/engine/test_evaluator.py | 71 ++++ tests/helpers/mockserver.py | 21 +- tests/integration/files/split_old_spec.json | 328 ++++++++++++++++ tests/integration/test_client_e2e.py | 367 +++++++++++++++++- tests/models/grammar/test_matchers.py | 44 +++ tests/models/test_rule_based_segments.py | 5 +- 14 files changed, 1007 insertions(+), 19 deletions(-) delete mode 100644 splits.json create mode 100644 tests/engine/files/rule_base_segments.json create mode 100644 tests/engine/files/rule_base_segments2.json create mode 100644 tests/engine/files/rule_base_segments3.json create mode 100644 tests/integration/files/split_old_spec.json diff --git a/splitio/api/client.py b/splitio/api/client.py index d0bda3e7..5d3ef6f4 100644 --- a/splitio/api/client.py +++ b/splitio/api/client.py @@ -133,7 +133,7 @@ def set_telemetry_data(self, metric_name, telemetry_runtime_producer): self._metric_name = metric_name def is_sdk_endpoint_overridden(self): - return self._urls['sdk'] == SDK_URL + return self._urls['sdk'] != SDK_URL def _get_headers(self, extra_headers, sdk_key): headers = _build_basic_headers(sdk_key) diff --git a/splitio/engine/evaluator.py b/splitio/engine/evaluator.py index 45544d3d..4306dff2 100644 --- a/splitio/engine/evaluator.py +++ b/splitio/engine/evaluator.py @@ -125,13 +125,19 @@ def context_for(self, key, feature_names): pending = set() for feature in features.values(): cf, cs, crbs = get_dependencies(feature) + for rbs in crbs: + rbs_cf, rbs_cs, rbs_crbs = get_dependencies(self._rbs_segment_storage.get(rbs)) + cf.extend(rbs_cf) + cs.extend(rbs_cs) + crbs.extend(rbs_crbs) + pending.update(filter(lambda f: f not in splits, cf)) pending_memberships.update(cs) pending_rbs_memberships.update(crbs) rbs_segment_memberships = {} rbs_segment_conditions = {} - excluded_rbs_segments = {} + 
excluded_rbs_segments = set() key_membership = False segment_memberhsip = False for rbs_segment in pending_rbs_memberships: @@ -147,7 +153,7 @@ def context_for(self, key, feature_names): if excluded_segment.type == SegmentType.RULE_BASED: rbs_segment = self._rbs_segment_storage.get(excluded_segment.name) if rbs_segment is not None: - excluded_rbs_segments.update() + excluded_rbs_segments.add(rbs_segment) rbs_segment_memberships.update({rbs_segment: segment_memberhsip or key_membership}) if not (segment_memberhsip or key_membership): @@ -189,7 +195,7 @@ async def context_for(self, key, feature_names): splits.update(features) pending = set() for feature in features.values(): - cf, cs, crbs = get_dependencies(feature) + cf, cs, crbs = get_dependencies(feature) pending.update(filter(lambda f: f not in splits, cf)) pending_memberships.update(cs) pending_rbs_memberships.update(crbs) diff --git a/splitio/models/grammar/matchers/rule_based_segment.py b/splitio/models/grammar/matchers/rule_based_segment.py index 88e84f9c..30fff738 100644 --- a/splitio/models/grammar/matchers/rule_based_segment.py +++ b/splitio/models/grammar/matchers/rule_based_segment.py @@ -34,7 +34,7 @@ def _match(self, key, attributes=None, context=None): return False for rbs_segment in context['ec'].excluded_rbs_segments: - if self._match_conditions(rbs_segment, key, attributes, context): + if self._match_conditions(rbs_segment.conditions, key, attributes, context): return True if self._match_conditions(context['ec'].segment_rbs_conditions.get(self._rbs_segment_name), key, attributes, context): @@ -50,7 +50,7 @@ def _add_matcher_specific_properties_to_json(self): } } - def _match_conditions(self, rbs_segment, key, attributes, context): - for parsed_condition in rbs_segment: + def _match_conditions(self, rbs_segment_conditions, key, attributes, context): + for parsed_condition in rbs_segment_conditions: if parsed_condition.matches(key, attributes, context): return True diff --git 
a/splitio/models/rule_based_segments.py b/splitio/models/rule_based_segments.py index c2f1a6f1..dd964055 100644 --- a/splitio/models/rule_based_segments.py +++ b/splitio/models/rule_based_segments.py @@ -111,6 +111,16 @@ def from_raw(raw_rule_based_segment): _LOGGER.error(str(e)) _LOGGER.debug("Using default conditions template for feature flag: %s", raw_rule_based_segment['name']) conditions = [condition.from_raw(_DEFAULT_CONDITIONS_TEMPLATE)] + + if raw_rule_based_segment.get('excluded') == None: + raw_rule_based_segment['excluded'] = {'keys': [], 'segments': []} + + if raw_rule_based_segment['excluded'].get('keys') == None: + raw_rule_based_segment['excluded']['keys'] = [] + + if raw_rule_based_segment['excluded'].get('segments') == None: + raw_rule_based_segment['excluded']['segments'] = [] + return RuleBasedSegment( raw_rule_based_segment['name'], raw_rule_based_segment['trafficTypeName'], diff --git a/splits.json b/splits.json deleted file mode 100644 index 67bd4fbe..00000000 --- a/splits.json +++ /dev/null @@ -1 +0,0 @@ -{"ff": {"t": -1, "s": -1, "d": [{"changeNumber": 123, "trafficTypeName": "user", "name": "third_split", "trafficAllocation": 100, "trafficAllocationSeed": 123456, "seed": 321654, "status": "ACTIVE", "killed": true, "defaultTreatment": "off", "algo": 2, "conditions": [{"partitions": [{"treatment": "on", "size": 50}, {"treatment": "off", "size": 50}], "contitionType": "WHITELIST", "label": "some_label", "matcherGroup": {"matchers": [{"matcherType": "WHITELIST", "whitelistMatcherData": {"whitelist": ["k1", "k2", "k3"]}, "negate": false}], "combiner": "AND"}}, {"conditionType": "ROLLOUT", "matcherGroup": {"combiner": "AND", "matchers": [{"keySelector": {"trafficType": "user"}, "matcherType": "IN_RULE_BASED_SEGMENT", "negate": false, "userDefinedSegmentMatcherData": {"segmentName": "sample_rule_based_segment"}}]}, "partitions": [{"treatment": "on", "size": 100}, {"treatment": "off", "size": 0}], "label": "in rule based segment 
sample_rule_based_segment"}], "sets": ["set6"]}]}, "rbs": {"t": 1675095324253, "s": -1, "d": [{"changeNumber": 5, "name": "sample_rule_based_segment", "status": "ACTIVE", "trafficTypeName": "user", "excluded": {"keys": ["mauro@split.io", "gaston@split.io"], "segments": []}, "conditions": [{"matcherGroup": {"combiner": "AND", "matchers": [{"keySelector": {"trafficType": "user", "attribute": "email"}, "matcherType": "ENDS_WITH", "negate": false, "whitelistMatcherData": {"whitelist": ["@split.io"]}}]}}]}]}} \ No newline at end of file diff --git a/tests/engine/files/rule_base_segments.json b/tests/engine/files/rule_base_segments.json new file mode 100644 index 00000000..0ab3495b --- /dev/null +++ b/tests/engine/files/rule_base_segments.json @@ -0,0 +1,61 @@ +{"ff": {"d": [], "t": -1, "s": -1}, +"rbs": {"t": -1, "s": -1, "d": + [{ + "changeNumber": 5, + "name": "dependent_rbs", + "status": "ACTIVE", + "trafficTypeName": "user", + "excluded":{"keys":["mauro@split.io","gaston@split.io"],"segments":[]}, + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": "email" + }, + "matcherType": "ENDS_WITH", + "negate": false, + "whitelistMatcherData": { + "whitelist": [ + "@split.io" + ] + } + } + ] + } + } + ]}, + { + "changeNumber": 5, + "name": "sample_rule_based_segment", + "status": "ACTIVE", + "trafficTypeName": "user", + "excluded": { + "keys": [], + "segments": [] + }, + "conditions": [ + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user" + }, + "matcherType": "IN_RULE_BASED_SEGMENT", + "negate": false, + "userDefinedSegmentMatcherData": { + "segmentName": "dependent_rbs" + } + } + ] + } + } + ] + }] +}} diff --git a/tests/engine/files/rule_base_segments2.json b/tests/engine/files/rule_base_segments2.json new file mode 100644 index 00000000..fa2b006b --- /dev/null +++ 
b/tests/engine/files/rule_base_segments2.json @@ -0,0 +1,63 @@ +{"ff": {"d": [], "t": -1, "s": -1}, +"rbs": {"t": -1, "s": -1, "d": [ + { + "changeNumber": 5, + "name": "sample_rule_based_segment", + "status": "ACTIVE", + "trafficTypeName": "user", + "excluded":{ + "keys":["mauro@split.io","gaston@split.io"], + "segments":[{"type":"rule-based", "name":"no_excludes"}] + }, + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": "email" + }, + "matcherType": "ENDS_WITH", + "negate": false, + "whitelistMatcherData": { + "whitelist": [ + "@split.io" + ] + } + } + ] + } + } + ] + }, + { + "changeNumber": 5, + "name": "no_excludes", + "status": "ACTIVE", + "trafficTypeName": "user", + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": "email" + }, + "matcherType": "ENDS_WITH", + "negate": false, + "whitelistMatcherData": { + "whitelist": [ + "@split.io" + ] + } + } + ] + } + } + ] + } +]}} diff --git a/tests/engine/files/rule_base_segments3.json b/tests/engine/files/rule_base_segments3.json new file mode 100644 index 00000000..f738f3f7 --- /dev/null +++ b/tests/engine/files/rule_base_segments3.json @@ -0,0 +1,35 @@ +{"ff": {"d": [], "t": -1, "s": -1}, +"rbs": {"t": -1, "s": -1, "d": [ + { + "changeNumber": 5, + "name": "sample_rule_based_segment", + "status": "ACTIVE", + "trafficTypeName": "user", + "excluded":{ + "keys":["mauro@split.io","gaston@split.io"], + "segments":[{"type":"standard", "name":"segment1"}] + }, + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": "email" + }, + "matcherType": "ENDS_WITH", + "negate": false, + "whitelistMatcherData": { + "whitelist": [ + "@split.io" + ] + } + } + ] + } + } + ] + } +]}} diff --git a/tests/engine/test_evaluator.py b/tests/engine/test_evaluator.py index 
6d160a9e..102f3db0 100644 --- a/tests/engine/test_evaluator.py +++ b/tests/engine/test_evaluator.py @@ -1,9 +1,12 @@ """Evaluator tests module.""" +import json import logging +import os import pytest import copy from splitio.models.splits import Split, Status +from splitio.models import segments from splitio.models.grammar.condition import Condition, ConditionType from splitio.models.impressions import Label from splitio.models.grammar import condition @@ -245,6 +248,74 @@ def test_evaluate_treatment_with_rule_based_segment(self, mocker): result = e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx) assert result['treatment'] == 'off' + def test_evaluate_treatment_with_rbs_in_condition(self): + e = evaluator.Evaluator(splitters.Splitter()) + splits_storage = InMemorySplitStorage() + rbs_storage = InMemoryRuleBasedSegmentStorage() + segment_storage = InMemorySegmentStorage() + evaluation_facctory = EvaluationDataFactory(splits_storage, segment_storage, rbs_storage) + + rbs_segments = os.path.join(os.path.dirname(__file__), 'files', 'rule_base_segments.json') + with open(rbs_segments, 'r') as flo: + data = json.loads(flo.read()) + + mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False) + rbs = rule_based_segments.from_raw(data["rbs"]["d"][0]) + rbs2 = rule_based_segments.from_raw(data["rbs"]["d"][1]) + rbs_storage.update([rbs, rbs2], [], 12) + splits_storage.update([mocked_split], [], 12) + + ctx = evaluation_facctory.context_for('bilal@split.io', ['some']) + assert e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx)['treatment'] == "on" + ctx = evaluation_facctory.context_for('mauro@split.io', ['some']) + assert e.eval_with_context('mauro@split.io', 'mauro@split.io', 'some', {'email': 'mauro@split.io'}, ctx)['treatment'] == "off" + + + def test_using_segment_in_excluded(self): + rbs_segments = 
os.path.join(os.path.dirname(__file__), 'files', 'rule_base_segments3.json') + with open(rbs_segments, 'r') as flo: + data = json.loads(flo.read()) + e = evaluator.Evaluator(splitters.Splitter()) + splits_storage = InMemorySplitStorage() + rbs_storage = InMemoryRuleBasedSegmentStorage() + segment_storage = InMemorySegmentStorage() + evaluation_facctory = EvaluationDataFactory(splits_storage, segment_storage, rbs_storage) + + mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False) + rbs = rule_based_segments.from_raw(data["rbs"]["d"][0]) + rbs_storage.update([rbs], [], 12) + splits_storage.update([mocked_split], [], 12) + segment = segments.from_raw({'name': 'segment1', 'added': ['pato@split.io'], 'removed': [], 'till': 123}) + segment_storage.put(segment) + + ctx = evaluation_facctory.context_for('bilal@split.io', ['some']) + assert e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx)['treatment'] == "on" + ctx = evaluation_facctory.context_for('mauro@split.io', ['some']) + assert e.eval_with_context('mauro@split.io', 'mauro@split.io', 'some', {'email': 'mauro@split.io'}, ctx)['treatment'] == "off" + ctx = evaluation_facctory.context_for('pato@split.io', ['some']) + assert e.eval_with_context('pato@split.io', 'pato@split.io', 'some', {'email': 'pato@split.io'}, ctx)['treatment'] == "off" + + def test_using_rbs_in_excluded(self): + rbs_segments = os.path.join(os.path.dirname(__file__), 'files', 'rule_base_segments2.json') + with open(rbs_segments, 'r') as flo: + data = json.loads(flo.read()) + e = evaluator.Evaluator(splitters.Splitter()) + splits_storage = InMemorySplitStorage() + rbs_storage = InMemoryRuleBasedSegmentStorage() + segment_storage = InMemorySegmentStorage() + evaluation_facctory = EvaluationDataFactory(splits_storage, segment_storage, rbs_storage) + + mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, 
split_conditions, 1.2, 100, 1234, {}, None, False) + rbs = rule_based_segments.from_raw(data["rbs"]["d"][0]) + rbs2 = rule_based_segments.from_raw(data["rbs"]["d"][1]) + rbs_storage.update([rbs, rbs2], [], 12) + splits_storage.update([mocked_split], [], 12) + + ctx = evaluation_facctory.context_for('bilal@split.io', ['some']) + assert e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx)['treatment'] == "on" + ctx = evaluation_facctory.context_for('mauro@split.io', ['some']) + assert e.eval_with_context('mauro@split.io', 'mauro@split.io', 'some', {'email': 'mauro@split.io'}, ctx)['treatment'] == "on" + class EvaluationDataFactoryTests(object): """Test evaluation factory class.""" diff --git a/tests/helpers/mockserver.py b/tests/helpers/mockserver.py index 71cd186b..8d41cfd2 100644 --- a/tests/helpers/mockserver.py +++ b/tests/helpers/mockserver.py @@ -3,12 +3,13 @@ from collections import namedtuple import queue import threading +import pytest from http.server import HTTPServer, BaseHTTPRequestHandler Request = namedtuple('Request', ['method', 'path', 'headers', 'body']) - +OLD_SPEC = False class SSEMockServer(object): """SSE server for testing purposes.""" @@ -102,19 +103,22 @@ class SplitMockServer(object): protocol_version = 'HTTP/1.1' def __init__(self, split_changes=None, segment_changes=None, req_queue=None, - auth_response=None): + auth_response=None, old_spec=False): """ Consruct a mock server. 
:param changes: mapping of changeNumbers to splitChanges responses :type changes: dict """ + global OLD_SPEC + OLD_SPEC = old_spec split_changes = split_changes if split_changes is not None else {} segment_changes = segment_changes if segment_changes is not None else {} self._server = HTTPServer(('localhost', 0), lambda *xs: SDKHandler(split_changes, segment_changes, *xs, req_queue=req_queue, - auth_response=auth_response)) + auth_response=auth_response, + )) self._server_thread = threading.Thread(target=self._blocking_run, name="SplitMockServer", daemon=True) self._done_event = threading.Event() @@ -148,7 +152,7 @@ def __init__(self, split_changes, segment_changes, *args, **kwargs): self._req_queue = kwargs.get('req_queue') self._auth_response = kwargs.get('auth_response') self._split_changes = split_changes - self._segment_changes = segment_changes + self._segment_changes = segment_changes BaseHTTPRequestHandler.__init__(self, *args) def _parse_qs(self): @@ -180,6 +184,15 @@ def _handle_segment_changes(self): self.wfile.write(json.dumps(to_send).encode('utf-8')) def _handle_split_changes(self): + global OLD_SPEC + if OLD_SPEC: + self.send_response(400) + self.send_header("Content-type", "application/json") + self.end_headers() + self.wfile.write('{}'.encode('utf-8')) + OLD_SPEC = False + return + qstring = self._parse_qs() since = int(qstring.get('since', -1)) to_send = self._split_changes.get(since) diff --git a/tests/integration/files/split_old_spec.json b/tests/integration/files/split_old_spec.json new file mode 100644 index 00000000..0d7edf86 --- /dev/null +++ b/tests/integration/files/split_old_spec.json @@ -0,0 +1,328 @@ +{ + "splits": [ + { + "orgId": null, + "environment": null, + "trafficTypeId": null, + "trafficTypeName": null, + "name": "whitelist_feature", + "seed": -1222652054, + "status": "ACTIVE", + "killed": false, + "changeNumber": 123, + "defaultTreatment": "off", + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { 
+ "matcherType": "WHITELIST", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": { + "whitelist": [ + "whitelisted_user" + ] + } + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + } + ] + }, + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 0 + }, + { + "treatment": "off", + "size": 100 + } + ] + } + ], + "sets": ["set1", "set2"] + }, + { + "orgId": null, + "environment": null, + "trafficTypeId": null, + "trafficTypeName": null, + "name": "all_feature", + "seed": 1699838640, + "status": "ACTIVE", + "killed": false, + "changeNumber": 123, + "defaultTreatment": "off", + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + }, + { + "treatment": "off", + "size": 0 + } + ] + } + ], + "sets": ["set4"] + }, + { + "orgId": null, + "environment": null, + "trafficTypeId": null, + "trafficTypeName": null, + "name": "killed_feature", + "seed": -480091424, + "status": "ACTIVE", + "killed": true, + "changeNumber": 123, + "defaultTreatment": "defTreatment", + "configurations": { + "off": "{\"size\":15,\"test\":20}", + "defTreatment": "{\"size\":15,\"defTreatment\":true}" + }, + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "defTreatment", + "size": 100 + }, + { + "treatment": "off", + "size": 0 + } + ] + } + ], + "sets": ["set3"] + }, + { + "orgId": null, + "environment": null, + "trafficTypeId": null, + 
"trafficTypeName": null, + "name": "sample_feature", + "seed": 1548363147, + "status": "ACTIVE", + "killed": false, + "changeNumber": 123, + "defaultTreatment": "off", + "configurations": { + "on": "{\"size\":15,\"test\":20}" + }, + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "matcherType": "IN_SEGMENT", + "negate": false, + "userDefinedSegmentMatcherData": { + "segmentName": "employees" + }, + "whitelistMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + } + ] + }, + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "matcherType": "IN_SEGMENT", + "negate": false, + "userDefinedSegmentMatcherData": { + "segmentName": "human_beigns" + }, + "whitelistMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 30 + }, + { + "treatment": "off", + "size": 70 + } + ] + } + ], + "sets": ["set1"] + }, + { + "orgId": null, + "environment": null, + "trafficTypeId": null, + "trafficTypeName": null, + "name": "dependency_test", + "seed": 1222652054, + "status": "ACTIVE", + "killed": false, + "changeNumber": 123, + "defaultTreatment": "off", + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "matcherType": "IN_SPLIT_TREATMENT", + "negate": false, + "userDefinedSegmentMatcherData": null, + "dependencyMatcherData": { + "split": "all_feature", + "treatments": ["on"] + } + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 0 + }, + { + "treatment": "off", + "size": 100 + } + ] + } + ], + "sets": [] + }, + { + "orgId": null, + "environment": null, + "trafficTypeId": null, + "trafficTypeName": null, + "name": "regex_test", + "seed": 1222652051, + "status": "ACTIVE", + "killed": false, + "changeNumber": 123, + "defaultTreatment": "off", + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "matcherType": "MATCHES_STRING", + "negate": false, + "userDefinedSegmentMatcherData": null, + 
"stringMatcherData": "abc[0-9]" + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + }, + { + "treatment": "off", + "size": 0 + } + ] + } + ], + "sets": [] + }, + { + "orgId": null, + "environment": null, + "trafficTypeId": null, + "trafficTypeName": null, + "name": "boolean_test", + "status": "ACTIVE", + "killed": false, + "changeNumber": 123, + "seed": 12321809, + "defaultTreatment": "off", + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "matcherType": "EQUAL_TO_BOOLEAN", + "negate": false, + "userDefinedSegmentMatcherData": null, + "booleanMatcherData": true + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + }, + { + "treatment": "off", + "size": 0 + } + ] + } + ], + "sets": [] + } + ], + "since": -1, + "till": 1457726098069 +} \ No newline at end of file diff --git a/tests/integration/test_client_e2e.py b/tests/integration/test_client_e2e.py index b1f5d836..140968ce 100644 --- a/tests/integration/test_client_e2e.py +++ b/tests/integration/test_client_e2e.py @@ -1,5 +1,6 @@ """Client integration tests.""" # pylint: disable=protected-access,line-too-long,no-self-use +from asyncio import Queue import json import os import threading @@ -41,6 +42,7 @@ from splitio.sync.synchronizer import PluggableSynchronizer, PluggableSynchronizerAsync from splitio.sync.telemetry import RedisTelemetrySubmitter, RedisTelemetrySubmitterAsync +from tests.helpers.mockserver import SplitMockServer from tests.integration import splits_json from tests.storage.test_pluggable import StorageMockAdapter, StorageMockAdapterAsync @@ -99,7 +101,7 @@ def _validate_last_events(client, *to_validate): as_tup_set = set((i.key, i.traffic_type_name, i.event_type_id, i.value, str(i.properties)) for i in events) assert as_tup_set == set(to_validate) -def _get_treatment(factory): +def _get_treatment(factory, skip_rbs=False): """Test client.get_treatment().""" try: client = factory.client() @@ -156,6 +158,9 @@ def 
_get_treatment(factory): if not isinstance(factory._recorder._impressions_manager._strategy, StrategyNoneMode): _validate_last_impressions(client, ('regex_test', 'abc4', 'on')) + if skip_rbs: + return + # test rule based segment matcher assert client.get_treatment('bilal@split.io', 'rbs_feature_flag', {'email': 'bilal@split.io'}) == 'on' if not isinstance(factory._recorder._impressions_manager._strategy, StrategyNoneMode): @@ -419,7 +424,7 @@ def _track(factory): ('user1', 'user', 'conversion', 1, "{'prop1': 'value1'}") ) -def _manager_methods(factory): +def _manager_methods(factory, skip_rbs=False): """Test manager.split/splits.""" try: manager = factory.manager() @@ -450,6 +455,11 @@ def _manager_methods(factory): assert result.change_number == 123 assert result.configs['on'] == '{"size":15,"test":20}' + if skip_rbs: + assert len(manager.split_names()) == 7 + assert len(manager.splits()) == 7 + return + assert len(manager.split_names()) == 8 assert len(manager.splits()) == 8 @@ -745,6 +755,159 @@ def test_track(self): """Test client.track().""" _track(self.factory) +class InMemoryOldSpecIntegrationTests(object): + """Inmemory storage-based integration tests.""" + + def setup_method(self): + """Prepare storages with test data.""" + + split_fn = os.path.join(os.path.dirname(__file__), 'files', 'split_old_spec.json') + with open(split_fn, 'r') as flo: + data = json.loads(flo.read()) + + split_changes = { + -1: data, + 1457726098069: {"splits": [], "till": 1457726098069, "since": 1457726098069} + } + + segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentEmployeesChanges.json') + with open(segment_fn, 'r') as flo: + segment_employee = json.loads(flo.read()) + + segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentHumanBeignsChanges.json') + with open(segment_fn, 'r') as flo: + segment_human = json.loads(flo.read()) + + segment_changes = { + ("employees", -1): segment_employee, + ("employees", 1457474612832): {"name": 
"employees","added": [],"removed": [],"since": 1457474612832,"till": 1457474612832}, + ("human_beigns", -1): segment_human, + ("human_beigns", 1457102183278): {"name": "employees","added": [],"removed": [],"since": 1457102183278,"till": 1457102183278}, + } + + split_backend_requests = Queue() + self.split_backend = SplitMockServer(split_changes, segment_changes, split_backend_requests, + {'auth_response': {'pushEnabled': False}}, True) + self.split_backend.start() + + kwargs = { + 'sdk_api_base_url': 'http://localhost:%d/api' % self.split_backend.port(), + 'events_api_base_url': 'http://localhost:%d/api' % self.split_backend.port(), + 'auth_api_base_url': 'http://localhost:%d/api' % self.split_backend.port(), + 'config': {'connectTimeout': 10000, 'streamingEnabled': False, 'impressionsMode': 'debug'} + } + + self.factory = get_factory('some_apikey', **kwargs) + self.factory.block_until_ready(1) + assert self.factory.ready + + def teardown_method(self): + """Shut down the factory.""" + event = threading.Event() + self.factory.destroy(event) + event.wait() + self.split_backend.stop() + time.sleep(1) + + def test_get_treatment(self): + """Test client.get_treatment().""" + _get_treatment(self.factory, True) + + def test_get_treatment_with_config(self): + """Test client.get_treatment_with_config().""" + _get_treatment_with_config(self.factory) + + def test_get_treatments(self): + _get_treatments(self.factory) + # testing multiple splitNames + client = self.factory.client() + result = client.get_treatments('invalidKey', [ + 'all_feature', + 'killed_feature', + 'invalid_feature', + 'sample_feature' + ]) + assert len(result) == 4 + assert result['all_feature'] == 'on' + assert result['killed_feature'] == 'defTreatment' + assert result['invalid_feature'] == 'control' + assert result['sample_feature'] == 'off' + _validate_last_impressions( + client, + ('all_feature', 'invalidKey', 'on'), + ('killed_feature', 'invalidKey', 'defTreatment'), + ('sample_feature', 'invalidKey', 
'off') + ) + + def test_get_treatments_with_config(self): + """Test client.get_treatments_with_config().""" + _get_treatments_with_config(self.factory) + # testing multiple splitNames + client = self.factory.client() + result = client.get_treatments_with_config('invalidKey', [ + 'all_feature', + 'killed_feature', + 'invalid_feature', + 'sample_feature' + ]) + assert len(result) == 4 + assert result['all_feature'] == ('on', None) + assert result['killed_feature'] == ('defTreatment', '{"size":15,"defTreatment":true}') + assert result['invalid_feature'] == ('control', None) + assert result['sample_feature'] == ('off', None) + _validate_last_impressions( + client, + ('all_feature', 'invalidKey', 'on'), + ('killed_feature', 'invalidKey', 'defTreatment'), + ('sample_feature', 'invalidKey', 'off'), + ) + + def test_get_treatments_by_flag_set(self): + """Test client.get_treatments_by_flag_set().""" + _get_treatments_by_flag_set(self.factory) + + def test_get_treatments_by_flag_sets(self): + """Test client.get_treatments_by_flag_sets().""" + _get_treatments_by_flag_sets(self.factory) + client = self.factory.client() + result = client.get_treatments_by_flag_sets('user1', ['set1', 'set2', 'set4']) + assert len(result) == 3 + assert result == {'sample_feature': 'on', + 'whitelist_feature': 'off', + 'all_feature': 'on' + } + _validate_last_impressions(client, ('sample_feature', 'user1', 'on'), + ('whitelist_feature', 'user1', 'off'), + ('all_feature', 'user1', 'on') + ) + + def test_get_treatments_with_config_by_flag_set(self): + """Test client.get_treatments_with_config_by_flag_set().""" + _get_treatments_with_config_by_flag_set(self.factory) + + def test_get_treatments_with_config_by_flag_sets(self): + """Test client.get_treatments_with_config_by_flag_sets().""" + _get_treatments_with_config_by_flag_sets(self.factory) + client = self.factory.client() + result = client.get_treatments_with_config_by_flag_sets('user1', ['set1', 'set2', 'set4']) + assert len(result) == 3 + assert 
result == {'sample_feature': ('on', '{"size":15,"test":20}'), + 'whitelist_feature': ('off', None), + 'all_feature': ('on', None) + } + _validate_last_impressions(client, ('sample_feature', 'user1', 'on'), + ('whitelist_feature', 'user1', 'off'), + ('all_feature', 'user1', 'on') + ) + + def test_track(self): + """Test client.track().""" + _track(self.factory) + + def test_manager_methods(self): + """Test manager.split/splits.""" + _manager_methods(self.factory, True) + class RedisIntegrationTests(object): """Redis storage-based integration tests.""" @@ -2423,6 +2586,194 @@ async def test_track(self): await _track_async(self.factory) await self.factory.destroy() +class InMemoryOldSpecIntegrationAsyncTests(object): + """Inmemory storage-based integration tests.""" + + def setup_method(self): + self.setup_task = asyncio.get_event_loop().create_task(self._setup_method()) + + async def _setup_method(self): + """Prepare storages with test data.""" + + split_fn = os.path.join(os.path.dirname(__file__), 'files', 'split_old_spec.json') + with open(split_fn, 'r') as flo: + data = json.loads(flo.read()) + + split_changes = { + -1: data, + 1457726098069: {"splits": [], "till": 1457726098069, "since": 1457726098069} + } + + segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentEmployeesChanges.json') + with open(segment_fn, 'r') as flo: + segment_employee = json.loads(flo.read()) + + segment_fn = os.path.join(os.path.dirname(__file__), 'files', 'segmentHumanBeignsChanges.json') + with open(segment_fn, 'r') as flo: + segment_human = json.loads(flo.read()) + + segment_changes = { + ("employees", -1): segment_employee, + ("employees", 1457474612832): {"name": "employees","added": [],"removed": [],"since": 1457474612832,"till": 1457474612832}, + ("human_beigns", -1): segment_human, + ("human_beigns", 1457102183278): {"name": "employees","added": [],"removed": [],"since": 1457102183278,"till": 1457102183278}, + } + + split_backend_requests = Queue() + 
self.split_backend = SplitMockServer(split_changes, segment_changes, split_backend_requests, + {'auth_response': {'pushEnabled': False}}, True) + self.split_backend.start() + + kwargs = { + 'sdk_api_base_url': 'http://localhost:%d/api' % self.split_backend.port(), + 'events_api_base_url': 'http://localhost:%d/api' % self.split_backend.port(), + 'auth_api_base_url': 'http://localhost:%d/api' % self.split_backend.port(), + 'config': {'connectTimeout': 10000, 'streamingEnabled': False, 'impressionsMode': 'debug'} + } + + self.factory = await get_factory_async('some_apikey', **kwargs) + await self.factory.block_until_ready(1) + assert self.factory.ready + + @pytest.mark.asyncio + async def test_get_treatment(self): + """Test client.get_treatment().""" + await self.setup_task + await _get_treatment_async(self.factory, True) + await self.factory.destroy() + self.split_backend.stop() + + @pytest.mark.asyncio + async def test_get_treatment_with_config(self): + """Test client.get_treatment_with_config().""" + await self.setup_task + await _get_treatment_with_config_async(self.factory) + await self.factory.destroy() + self.split_backend.stop() + + @pytest.mark.asyncio + async def test_get_treatments(self): + await self.setup_task + await _get_treatments_async(self.factory) + # testing multiple splitNames + client = self.factory.client() + result = await client.get_treatments('invalidKey', [ + 'all_feature', + 'killed_feature', + 'invalid_feature', + 'sample_feature' + ]) + assert len(result) == 4 + assert result['all_feature'] == 'on' + assert result['killed_feature'] == 'defTreatment' + assert result['invalid_feature'] == 'control' + assert result['sample_feature'] == 'off' + await _validate_last_impressions_async( + client, + ('all_feature', 'invalidKey', 'on'), + ('killed_feature', 'invalidKey', 'defTreatment'), + ('sample_feature', 'invalidKey', 'off') + ) + await self.factory.destroy() + self.split_backend.stop() + + @pytest.mark.asyncio + async def 
test_get_treatments_with_config(self): + """Test client.get_treatments_with_config().""" + await self.setup_task + await _get_treatments_with_config_async(self.factory) + # testing multiple splitNames + client = self.factory.client() + result = await client.get_treatments_with_config('invalidKey', [ + 'all_feature', + 'killed_feature', + 'invalid_feature', + 'sample_feature' + ]) + assert len(result) == 4 + assert result['all_feature'] == ('on', None) + assert result['killed_feature'] == ('defTreatment', '{"size":15,"defTreatment":true}') + assert result['invalid_feature'] == ('control', None) + assert result['sample_feature'] == ('off', None) + await _validate_last_impressions_async( + client, + ('all_feature', 'invalidKey', 'on'), + ('killed_feature', 'invalidKey', 'defTreatment'), + ('sample_feature', 'invalidKey', 'off'), + ) + await self.factory.destroy() + self.split_backend.stop() + + @pytest.mark.asyncio + async def test_get_treatments_by_flag_set(self): + """Test client.get_treatments_by_flag_set().""" + await self.setup_task + await _get_treatments_by_flag_set_async(self.factory) + await self.factory.destroy() + self.split_backend.stop() + + @pytest.mark.asyncio + async def test_get_treatments_by_flag_sets(self): + """Test client.get_treatments_by_flag_sets().""" + await self.setup_task + await _get_treatments_by_flag_sets_async(self.factory) + client = self.factory.client() + result = await client.get_treatments_by_flag_sets('user1', ['set1', 'set2', 'set4']) + assert len(result) == 3 + assert result == {'sample_feature': 'on', + 'whitelist_feature': 'off', + 'all_feature': 'on' + } + await _validate_last_impressions_async(client, ('sample_feature', 'user1', 'on'), + ('whitelist_feature', 'user1', 'off'), + ('all_feature', 'user1', 'on') + ) + await self.factory.destroy() + self.split_backend.stop() + + @pytest.mark.asyncio + async def test_get_treatments_with_config_by_flag_set(self): + """Test client.get_treatments_with_config_by_flag_set().""" + await 
self.setup_task + await _get_treatments_with_config_by_flag_set_async(self.factory) + await self.factory.destroy() + self.split_backend.stop() + + @pytest.mark.asyncio + async def test_get_treatments_with_config_by_flag_sets(self): + """Test client.get_treatments_with_config_by_flag_sets().""" + await self.setup_task + await _get_treatments_with_config_by_flag_sets_async(self.factory) + client = self.factory.client() + result = await client.get_treatments_with_config_by_flag_sets('user1', ['set1', 'set2', 'set4']) + assert len(result) == 3 + assert result == {'sample_feature': ('on', '{"size":15,"test":20}'), + 'whitelist_feature': ('off', None), + 'all_feature': ('on', None) + } + await _validate_last_impressions_async(client, ('sample_feature', 'user1', 'on'), + ('whitelist_feature', 'user1', 'off'), + ('all_feature', 'user1', 'on') + ) + await self.factory.destroy() + self.split_backend.stop() + + @pytest.mark.asyncio + async def test_track(self): + """Test client.track().""" + await self.setup_task + await _track_async(self.factory) + await self.factory.destroy() + self.split_backend.stop() + + @pytest.mark.asyncio + async def test_manager_methods(self): + """Test manager.split/splits.""" + await self.setup_task + await _manager_methods_async(self.factory, True) + await self.factory.destroy() + self.split_backend.stop() + class RedisIntegrationAsyncTests(object): """Redis storage-based integration tests.""" @@ -4048,7 +4399,7 @@ async def _validate_last_events_async(client, *to_validate): as_tup_set = set((i.key, i.traffic_type_name, i.event_type_id, i.value, str(i.properties)) for i in events) assert as_tup_set == set(to_validate) -async def _get_treatment_async(factory): +async def _get_treatment_async(factory, skip_rbs=False): """Test client.get_treatment().""" try: client = factory.client() @@ -4105,6 +4456,9 @@ async def _get_treatment_async(factory): if not isinstance(factory._recorder._impressions_manager._strategy, StrategyNoneMode): await 
_validate_last_impressions_async(client, ('regex_test', 'abc4', 'on')) + if skip_rbs: + return + # test rule based segment matcher assert await client.get_treatment('bilal@split.io', 'rbs_feature_flag', {'email': 'bilal@split.io'}) == 'on' if not isinstance(factory._recorder._impressions_manager._strategy, StrategyNoneMode): @@ -4368,7 +4722,7 @@ async def _track_async(factory): ('user1', 'user', 'conversion', 1, "{'prop1': 'value1'}") ) -async def _manager_methods_async(factory): +async def _manager_methods_async(factory, skip_rbs=False): """Test manager.split/splits.""" try: manager = factory.manager() @@ -4399,5 +4753,10 @@ async def _manager_methods_async(factory): assert result.change_number == 123 assert result.configs['on'] == '{"size":15,"test":20}' + if skip_rbs: + assert len(await manager.split_names()) == 7 + assert len(await manager.splits()) == 7 + return + assert len(await manager.split_names()) == 8 assert len(await manager.splits()) == 8 diff --git a/tests/models/grammar/test_matchers.py b/tests/models/grammar/test_matchers.py index d4d09aae..12e4bda8 100644 --- a/tests/models/grammar/test_matchers.py +++ b/tests/models/grammar/test_matchers.py @@ -12,6 +12,7 @@ from splitio.models.grammar import matchers from splitio.models import splits +from splitio.models import rule_based_segments from splitio.models.grammar import condition from splitio.models.grammar.matchers.utils.utils import Semver from splitio.storage import SegmentStorage @@ -1095,3 +1096,46 @@ def test_to_str(self): """Test that the object serializes to str properly.""" as_str = matchers.InListSemverMatcher(self.raw) assert str(as_str) == "in list semver ['2.1.8', '2.1.11']" + +class RuleBasedMatcherTests(MatcherTestsBase): + """Rule based segment matcher test cases.""" + + raw ={ + "keySelector": { + "trafficType": "user" + }, + "matcherType": "IN_RULE_BASED_SEGMENT", + "negate": False, + "userDefinedSegmentMatcherData": { + "segmentName": "sample_rule_based_segment" + } + } + + def 
test_from_raw(self, mocker): + """Test parsing from raw json/dict.""" + parsed = matchers.from_raw(self.raw) + assert isinstance(parsed, matchers.RuleBasedSegmentMatcher) + + def test_to_json(self): + """Test that the object serializes to JSON properly.""" + as_json = matchers.AllKeysMatcher(self.raw).to_json() + assert as_json['matcherType'] == 'IN_RULE_BASED_SEGMENT' + + def test_matcher_behaviour(self, mocker): + """Test if the matcher works properly.""" + rbs_segments = os.path.join(os.path.dirname(__file__), '../../engine/files', 'rule_base_segments3.json') + with open(rbs_segments, 'r') as flo: + data = json.loads(flo.read()) + + rbs = rule_based_segments.from_raw(data["rbs"]["d"][0]) + matcher = matchers.RuleBasedSegmentMatcher(self.raw) + ec ={'ec': EvaluationContext( + {}, + {}, + {}, + {"sample_rule_based_segment": rbs.conditions}, + {} + )} + assert matcher._match(None, context=ec) is False + assert matcher._match('bilal@split.io', context=ec) is False + assert matcher._match('bilal@split.io', {'email': 'bilal@split.io'}, context=ec) is True \ No newline at end of file diff --git a/tests/models/test_rule_based_segments.py b/tests/models/test_rule_based_segments.py index 3ad36773..98e35fe8 100644 --- a/tests/models/test_rule_based_segments.py +++ b/tests/models/test_rule_based_segments.py @@ -1,9 +1,9 @@ """Split model tests module.""" import copy -import pytest from splitio.models import rule_based_segments from splitio.models import splits from splitio.models.grammar.condition import Condition +from splitio.models.grammar.matchers.rule_based_segment import RuleBasedSegmentMatcher class RuleBasedSegmentModelTests(object): """Rule based segment model tests.""" @@ -100,5 +100,4 @@ def test_get_condition_segment_names(self): }) rbs = rule_based_segments.from_raw(rbs) - assert rbs.get_condition_segment_names() == {"employees"} - \ No newline at end of file + assert rbs.get_condition_segment_names() == {"employees"} \ No newline at end of file From 
e649a3c2fd94591a1ff3850962638a631693bd63 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Mon, 5 May 2025 17:43:54 -0700 Subject: [PATCH 24/56] polish --- splitio/engine/evaluator.py | 12 ++++-- tests/engine/test_evaluator.py | 70 ++++++++++++++++++++++++++++++++++ 2 files changed, 79 insertions(+), 3 deletions(-) diff --git a/splitio/engine/evaluator.py b/splitio/engine/evaluator.py index 4306dff2..12466350 100644 --- a/splitio/engine/evaluator.py +++ b/splitio/engine/evaluator.py @@ -195,14 +195,20 @@ async def context_for(self, key, feature_names): splits.update(features) pending = set() for feature in features.values(): - cf, cs, crbs = get_dependencies(feature) + cf, cs, crbs = get_dependencies(feature) + for rbs in crbs: + rbs_cf, rbs_cs, rbs_crbs = get_dependencies(await self._rbs_segment_storage.get(rbs)) + cf.extend(rbs_cf) + cs.extend(rbs_cs) + crbs.extend(rbs_crbs) + pending.update(filter(lambda f: f not in splits, cf)) pending_memberships.update(cs) pending_rbs_memberships.update(crbs) rbs_segment_memberships = {} rbs_segment_conditions = {} - excluded_rbs_segments = {} + excluded_rbs_segments = set() key_membership = False segment_memberhsip = False for rbs_segment in pending_rbs_memberships: @@ -218,7 +224,7 @@ async def context_for(self, key, feature_names): if excluded_segment.type == SegmentType.RULE_BASED: rbs_segment = await self._rbs_segment_storage.get(excluded_segment.name) if rbs_segment is not None: - excluded_rbs_segments.update() + excluded_rbs_segments.add(rbs_segment) rbs_segment_memberships.update({rbs_segment: segment_memberhsip or key_membership}) if not (segment_memberhsip or key_membership): diff --git a/tests/engine/test_evaluator.py b/tests/engine/test_evaluator.py index 102f3db0..fe082ce2 100644 --- a/tests/engine/test_evaluator.py +++ b/tests/engine/test_evaluator.py @@ -315,6 +315,76 @@ def test_using_rbs_in_excluded(self): assert e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, 
ctx)['treatment'] == "on" ctx = evaluation_facctory.context_for('mauro@split.io', ['some']) assert e.eval_with_context('mauro@split.io', 'mauro@split.io', 'some', {'email': 'mauro@split.io'}, ctx)['treatment'] == "on" + + @pytest.mark.asyncio + async def test_evaluate_treatment_with_rbs_in_condition_async(self): + e = evaluator.Evaluator(splitters.Splitter()) + splits_storage = InMemorySplitStorageAsync() + rbs_storage = InMemoryRuleBasedSegmentStorageAsync() + segment_storage = InMemorySegmentStorageAsync() + evaluation_facctory = AsyncEvaluationDataFactory(splits_storage, segment_storage, rbs_storage) + + rbs_segments = os.path.join(os.path.dirname(__file__), 'files', 'rule_base_segments.json') + with open(rbs_segments, 'r') as flo: + data = json.loads(flo.read()) + + mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False) + rbs = rule_based_segments.from_raw(data["rbs"]["d"][0]) + rbs2 = rule_based_segments.from_raw(data["rbs"]["d"][1]) + await rbs_storage.update([rbs, rbs2], [], 12) + await splits_storage.update([mocked_split], [], 12) + + ctx = await evaluation_facctory.context_for('bilal@split.io', ['some']) + assert e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx)['treatment'] == "on" + ctx = await evaluation_facctory.context_for('mauro@split.io', ['some']) + assert e.eval_with_context('mauro@split.io', 'mauro@split.io', 'some', {'email': 'mauro@split.io'}, ctx)['treatment'] == "off" + + @pytest.mark.asyncio + async def test_using_segment_in_excluded_async(self): + rbs_segments = os.path.join(os.path.dirname(__file__), 'files', 'rule_base_segments3.json') + with open(rbs_segments, 'r') as flo: + data = json.loads(flo.read()) + e = evaluator.Evaluator(splitters.Splitter()) + splits_storage = InMemorySplitStorageAsync() + rbs_storage = InMemoryRuleBasedSegmentStorageAsync() + segment_storage = InMemorySegmentStorageAsync() + 
evaluation_facctory = AsyncEvaluationDataFactory(splits_storage, segment_storage, rbs_storage) + + mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False) + rbs = rule_based_segments.from_raw(data["rbs"]["d"][0]) + await rbs_storage.update([rbs], [], 12) + await splits_storage.update([mocked_split], [], 12) + segment = segments.from_raw({'name': 'segment1', 'added': ['pato@split.io'], 'removed': [], 'till': 123}) + await segment_storage.put(segment) + + ctx = await evaluation_facctory.context_for('bilal@split.io', ['some']) + assert e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx)['treatment'] == "on" + ctx = await evaluation_facctory.context_for('mauro@split.io', ['some']) + assert e.eval_with_context('mauro@split.io', 'mauro@split.io', 'some', {'email': 'mauro@split.io'}, ctx)['treatment'] == "off" + ctx = await evaluation_facctory.context_for('pato@split.io', ['some']) + assert e.eval_with_context('pato@split.io', 'pato@split.io', 'some', {'email': 'pato@split.io'}, ctx)['treatment'] == "off" + + @pytest.mark.asyncio + async def test_using_rbs_in_excluded_async(self): + rbs_segments = os.path.join(os.path.dirname(__file__), 'files', 'rule_base_segments2.json') + with open(rbs_segments, 'r') as flo: + data = json.loads(flo.read()) + e = evaluator.Evaluator(splitters.Splitter()) + splits_storage = InMemorySplitStorageAsync() + rbs_storage = InMemoryRuleBasedSegmentStorageAsync() + segment_storage = InMemorySegmentStorageAsync() + evaluation_facctory = AsyncEvaluationDataFactory(splits_storage, segment_storage, rbs_storage) + + mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False) + rbs = rule_based_segments.from_raw(data["rbs"]["d"][0]) + rbs2 = rule_based_segments.from_raw(data["rbs"]["d"][1]) + await rbs_storage.update([rbs, rbs2], [], 12) + await 
splits_storage.update([mocked_split], [], 12) + + ctx = await evaluation_facctory.context_for('bilal@split.io', ['some']) + assert e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx)['treatment'] == "on" + ctx = await evaluation_facctory.context_for('mauro@split.io', ['some']) + assert e.eval_with_context('mauro@split.io', 'mauro@split.io', 'some', {'email': 'mauro@split.io'}, ctx)['treatment'] == "on" class EvaluationDataFactoryTests(object): """Test evaluation factory class.""" From 3eff00cb55723690ee809905015a15811ccda028 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Fri, 9 May 2025 09:29:30 -0700 Subject: [PATCH 25/56] polish --- splitio/models/grammar/matchers/rule_based_segment.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/splitio/models/grammar/matchers/rule_based_segment.py b/splitio/models/grammar/matchers/rule_based_segment.py index 30fff738..db531aeb 100644 --- a/splitio/models/grammar/matchers/rule_based_segment.py +++ b/splitio/models/grammar/matchers/rule_based_segment.py @@ -37,10 +37,7 @@ def _match(self, key, attributes=None, context=None): if self._match_conditions(rbs_segment.conditions, key, attributes, context): return True - if self._match_conditions(context['ec'].segment_rbs_conditions.get(self._rbs_segment_name), key, attributes, context): - return True - - return False + return self._match_conditions(context['ec'].segment_rbs_conditions.get(self._rbs_segment_name), key, attributes, context): def _add_matcher_specific_properties_to_json(self): """Return UserDefinedSegment specific properties.""" From f3e9137b16dac750d61f00d004013d3a500b7cdb Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Mon, 12 May 2025 17:31:21 -0700 Subject: [PATCH 26/56] Update rb segment matcher --- splitio/engine/evaluator.py | 142 +++++++----------- .../grammar/matchers/rule_based_segment.py | 37 ++++- splitio/storage/inmemmory.py | 6 + splitio/storage/pluggable.py | 38 +++++ 
splitio/storage/redis.py | 58 +++++++ tests/client/test_client.py | 2 +- tests/client/test_input_validator.py | 87 ++++++++--- tests/engine/files/rule_base_segments.json | 1 + tests/engine/files/rule_base_segments2.json | 4 +- tests/engine/test_evaluator.py | 54 +++---- tests/integration/test_client_e2e.py | 2 +- tests/models/grammar/test_matchers.py | 10 +- tests/storage/test_redis.py | 41 +++++ 13 files changed, 326 insertions(+), 156 deletions(-) diff --git a/splitio/engine/evaluator.py b/splitio/engine/evaluator.py index 12466350..d3e05f78 100644 --- a/splitio/engine/evaluator.py +++ b/splitio/engine/evaluator.py @@ -11,7 +11,7 @@ from splitio.optional.loaders import asyncio CONTROL = 'control' -EvaluationContext = namedtuple('EvaluationContext', ['flags', 'segment_memberships', 'segment_rbs_memberships', 'segment_rbs_conditions', 'excluded_rbs_segments']) +EvaluationContext = namedtuple('EvaluationContext', ['flags', 'segment_memberships', 'rbs_segments']) _LOGGER = logging.getLogger(__name__) @@ -115,59 +115,24 @@ def context_for(self, key, feature_names): :rtype: EvaluationContext """ pending = set(feature_names) + pending_rbs = set() splits = {} + rb_segments = {} pending_memberships = set() - pending_rbs_memberships = set() - while pending: + while pending or pending_rbs: fetched = self._flag_storage.fetch_many(list(pending)) - features = filter_missing(fetched) - splits.update(features) - pending = set() - for feature in features.values(): - cf, cs, crbs = get_dependencies(feature) - for rbs in crbs: - rbs_cf, rbs_cs, rbs_crbs = get_dependencies(self._rbs_segment_storage.get(rbs)) - cf.extend(rbs_cf) - cs.extend(rbs_cs) - crbs.extend(rbs_crbs) - - pending.update(filter(lambda f: f not in splits, cf)) - pending_memberships.update(cs) - pending_rbs_memberships.update(crbs) - - rbs_segment_memberships = {} - rbs_segment_conditions = {} - excluded_rbs_segments = set() - key_membership = False - segment_memberhsip = False - for rbs_segment in 
pending_rbs_memberships: - rbs_segment_obj = self._rbs_segment_storage.get(rbs_segment) - pending_memberships.update(rbs_segment_obj.get_condition_segment_names()) - - key_membership = key in rbs_segment_obj.excluded.get_excluded_keys() - segment_memberhsip = False - for excluded_segment in rbs_segment_obj.excluded.get_excluded_segments(): - if excluded_segment.type == SegmentType.STANDARD and self._segment_storage.segment_contains(excluded_segment.name, key): - segment_memberhsip = True - - if excluded_segment.type == SegmentType.RULE_BASED: - rbs_segment = self._rbs_segment_storage.get(excluded_segment.name) - if rbs_segment is not None: - excluded_rbs_segments.add(rbs_segment) - - rbs_segment_memberships.update({rbs_segment: segment_memberhsip or key_membership}) - if not (segment_memberhsip or key_membership): - rbs_segment_conditions.update({rbs_segment: [condition for condition in rbs_segment_obj.conditions]}) - + fetched_rbs = self._rbs_segment_storage.fetch_many(list(pending_rbs)) + features, rbsegments, splits, rb_segments = update_objects(fetched, fetched_rbs, splits, rb_segments) + pending, pending_memberships, pending_rbs = get_pending_objects(features, splits, rbsegments, rb_segments, pending_memberships) + return EvaluationContext( splits, { segment: self._segment_storage.segment_contains(segment, key) for segment in pending_memberships }, - rbs_segment_memberships, - rbs_segment_conditions, - excluded_rbs_segments + rb_segments ) + class AsyncEvaluationDataFactory: @@ -186,72 +151,36 @@ async def context_for(self, key, feature_names): :rtype: EvaluationContext """ pending = set(feature_names) + pending_rbs = set() splits = {} + rb_segments = {} pending_memberships = set() - pending_rbs_memberships = set() - while pending: + while pending or pending_rbs: fetched = await self._flag_storage.fetch_many(list(pending)) - features = filter_missing(fetched) - splits.update(features) - pending = set() - for feature in features.values(): - cf, cs, crbs = 
get_dependencies(feature) - for rbs in crbs: - rbs_cf, rbs_cs, rbs_crbs = get_dependencies(await self._rbs_segment_storage.get(rbs)) - cf.extend(rbs_cf) - cs.extend(rbs_cs) - crbs.extend(rbs_crbs) - - pending.update(filter(lambda f: f not in splits, cf)) - pending_memberships.update(cs) - pending_rbs_memberships.update(crbs) - - rbs_segment_memberships = {} - rbs_segment_conditions = {} - excluded_rbs_segments = set() - key_membership = False - segment_memberhsip = False - for rbs_segment in pending_rbs_memberships: - rbs_segment_obj = await self._rbs_segment_storage.get(rbs_segment) - pending_memberships.update(rbs_segment_obj.get_condition_segment_names()) - - key_membership = key in rbs_segment_obj.excluded.get_excluded_keys() - segment_memberhsip = False - for excluded_segment in rbs_segment_obj.excluded.get_excluded_segments(): - if excluded_segment.type == SegmentType.STANDARD and await self._segment_storage.segment_contains(excluded_segment.name, key): - segment_memberhsip = True - - if excluded_segment.type == SegmentType.RULE_BASED: - rbs_segment = await self._rbs_segment_storage.get(excluded_segment.name) - if rbs_segment is not None: - excluded_rbs_segments.add(rbs_segment) - - rbs_segment_memberships.update({rbs_segment: segment_memberhsip or key_membership}) - if not (segment_memberhsip or key_membership): - rbs_segment_conditions.update({rbs_segment: [condition for condition in rbs_segment_obj.conditions]}) + fetched_rbs = await self._rbs_segment_storage.fetch_many(list(pending_rbs)) + features, rbsegments, splits, rb_segments = update_objects(fetched, fetched_rbs, splits, rb_segments) + pending, pending_memberships, pending_rbs = get_pending_objects(features, splits, rbsegments, rb_segments, pending_memberships) segment_names = list(pending_memberships) segment_memberships = await asyncio.gather(*[ self._segment_storage.segment_contains(segment, key) for segment in segment_names ]) + return EvaluationContext( splits, dict(zip(segment_names, 
segment_memberships)), - rbs_segment_memberships, - rbs_segment_conditions, - excluded_rbs_segments + rb_segments ) - -def get_dependencies(feature): +def get_dependencies(object): """ :rtype: tuple(list, list) """ feature_names = [] segment_names = [] rbs_segment_names = [] - for condition in feature.conditions: + for condition in object.conditions: for matcher in condition.matchers: if isinstance(matcher,RuleBasedSegmentMatcher): rbs_segment_names.append(matcher._rbs_segment_name) @@ -264,3 +193,34 @@ def get_dependencies(feature): def filter_missing(features): return {k: v for (k, v) in features.items() if v is not None} + +def get_pending_objects(features, splits, rbsegments, rb_segments, pending_memberships): + pending = set() + pending_rbs = set() + for feature in features.values(): + cf, cs, crbs = get_dependencies(feature) + pending.update(filter(lambda f: f not in splits, cf)) + pending_memberships.update(cs) + pending_rbs.update(filter(lambda f: f not in rb_segments, crbs)) + + for rb_segment in rbsegments.values(): + cf, cs, crbs = get_dependencies(rb_segment) + pending.update(filter(lambda f: f not in splits, cf)) + pending_memberships.update(cs) + for excluded_segment in rb_segment.excluded.get_excluded_segments(): + if excluded_segment.type == SegmentType.STANDARD: + pending_memberships.add(excluded_segment.name) + else: + pending_rbs.update(filter(lambda f: f not in rb_segments, [excluded_segment.name])) + pending_rbs.update(filter(lambda f: f not in rb_segments, crbs)) + + return pending, pending_memberships, pending_rbs + +def update_objects(fetched, fetched_rbs, splits, rb_segments): + features = filter_missing(fetched) + rbsegments = filter_missing(fetched_rbs) + splits.update(features) + rb_segments.update(rbsegments) + + return features, rbsegments, splits, rb_segments + \ No newline at end of file diff --git a/splitio/models/grammar/matchers/rule_based_segment.py b/splitio/models/grammar/matchers/rule_based_segment.py index db531aeb..3e12a348 
100644 --- a/splitio/models/grammar/matchers/rule_based_segment.py +++ b/splitio/models/grammar/matchers/rule_based_segment.py @@ -1,5 +1,6 @@ """Rule based segment matcher classes.""" from splitio.models.grammar.matchers.base import Matcher +from splitio.models.rule_based_segments import SegmentType class RuleBasedSegmentMatcher(Matcher): @@ -29,15 +30,15 @@ def _match(self, key, attributes=None, context=None): if self._rbs_segment_name == None: return False - # Check if rbs segment has exclusions - if context['ec'].segment_rbs_memberships.get(self._rbs_segment_name): - return False - - for rbs_segment in context['ec'].excluded_rbs_segments: - if self._match_conditions(rbs_segment.conditions, key, attributes, context): - return True + rb_segment = context['ec'].rbs_segments.get(self._rbs_segment_name) - return self._match_conditions(context['ec'].segment_rbs_conditions.get(self._rbs_segment_name), key, attributes, context): + if key in rb_segment.excluded.get_excluded_keys(): + return False + + if self._match_dep_rb_segments(rb_segment.excluded.get_excluded_segments(), key, attributes, context): + return False + + return self._match_conditions(rb_segment.conditions, key, attributes, context) def _add_matcher_specific_properties_to_json(self): """Return UserDefinedSegment specific properties.""" @@ -51,3 +52,23 @@ def _match_conditions(self, rbs_segment_conditions, key, attributes, context): for parsed_condition in rbs_segment_conditions: if parsed_condition.matches(key, attributes, context): return True + + return False + + def _match_dep_rb_segments(self, excluded_rb_segments, key, attributes, context): + for excluded_rb_segment in excluded_rb_segments: + if excluded_rb_segment.type == SegmentType.STANDARD: + if context['ec'].segment_memberships[excluded_rb_segment.name]: + return True + else: + excluded_segment = context['ec'].rbs_segments.get(excluded_rb_segment.name) + if key in excluded_segment.excluded.get_excluded_keys(): + return True + + if 
self._match_dep_rb_segments(excluded_segment.excluded.get_excluded_segments(), key, attributes, context): + return True + + if self._match_conditions(excluded_segment.conditions, key, attributes, context): + return True + + return False diff --git a/splitio/storage/inmemmory.py b/splitio/storage/inmemmory.py index 9f215eed..c7c1a7bf 100644 --- a/splitio/storage/inmemmory.py +++ b/splitio/storage/inmemmory.py @@ -230,6 +230,9 @@ def contains(self, segment_names): """ with self._lock: return set(segment_names).issubset(self._rule_based_segments.keys()) + + def fetch_many(self, segment_names): + return {rb_segment_name: self.get(rb_segment_name) for rb_segment_name in segment_names} class InMemoryRuleBasedSegmentStorageAsync(RuleBasedSegmentsStorage): """InMemory implementation of a feature flag storage base.""" @@ -354,6 +357,9 @@ async def contains(self, segment_names): async with self._lock: return set(segment_names).issubset(self._rule_based_segments.keys()) + async def fetch_many(self, segment_names): + return {rb_segment_name: await self.get(rb_segment_name) for rb_segment_name in segment_names} + class InMemorySplitStorageBase(SplitStorage): """InMemory implementation of a feature flag storage base.""" diff --git a/splitio/storage/pluggable.py b/splitio/storage/pluggable.py index c27a92fd..36b27d7d 100644 --- a/splitio/storage/pluggable.py +++ b/splitio/storage/pluggable.py @@ -177,6 +177,25 @@ def get_segment_names(self): _LOGGER.error('Error getting rule based segments names from storage') _LOGGER.debug('Error: ', exc_info=True) return None + + def fetch_many(self, rb_segment_names): + """ + Retrieve rule based segments. + + :param rb_segment_names: Names of the rule based segments to fetch. + :type rb_segment_names: list(str) + + :return: A dict with rule based segment objects parsed from queue. 
+ :rtype: dict(rb_segment_names, splitio.models.rile_based_segment.RuleBasedSegment) + """ + try: + prefix_added = [self._prefix.format(segment_name=rb_segment_name) for rb_segment_name in rb_segment_names] + return {rb_segment['name']: rule_based_segments.from_raw(rb_segment) for rb_segment in self._pluggable_adapter.get_many(prefix_added)} + + except Exception: + _LOGGER.error('Error getting rule based segments from storage') + _LOGGER.debug('Error: ', exc_info=True) + return None class PluggableRuleBasedSegmentsStorageAsync(PluggableRuleBasedSegmentsStorageBase): """Pluggable storage for rule based segments.""" @@ -256,6 +275,25 @@ async def get_segment_names(self): _LOGGER.debug('Error: ', exc_info=True) return None + async def fetch_many(self, rb_segment_names): + """ + Retrieve rule based segments. + + :param rb_segment_names: Names of the rule based segments to fetch. + :type rb_segment_names: list(str) + + :return: A dict with rule based segment objects parsed from queue. + :rtype: dict(rb_segment_names, splitio.models.rile_based_segment.RuleBasedSegment) + """ + try: + prefix_added = [self._prefix.format(segment_name=rb_segment_name) for rb_segment_name in rb_segment_names] + return {rb_segment['name']: rule_based_segments.from_raw(rb_segment) for rb_segment in await self._pluggable_adapter.get_many(prefix_added)} + + except Exception: + _LOGGER.error('Error getting rule based segments from storage') + _LOGGER.debug('Error: ', exc_info=True) + return None + class PluggableSplitStorageBase(SplitStorage): """InMemory implementation of a feature flag storage.""" diff --git a/splitio/storage/redis.py b/splitio/storage/redis.py index e5398cf7..09ddee29 100644 --- a/splitio/storage/redis.py +++ b/splitio/storage/redis.py @@ -131,6 +131,35 @@ def get_large_segment_names(self): """ pass + def fetch_many(self, segment_names): + """ + Retrieve rule based segment. + + :param segment_names: Names of the rule based segments to fetch. 
+ :type segment_names: list(str) + + :return: A dict with rule based segment objects parsed from redis. + :rtype: dict(segment_name, splitio.models.rule_based_segment.RuleBasedSegment) + """ + to_return = dict() + try: + keys = [self._get_key(segment_name) for segment_name in segment_names] + raw_rbs_segments = self._redis.mget(keys) + _LOGGER.debug("Fetchting rule based segment [%s] from redis" % segment_names) + _LOGGER.debug(raw_rbs_segments) + for i in range(len(raw_rbs_segments)): + rbs_segment = None + try: + rbs_segment = rule_based_segments.from_raw(json.loads(raw_rbs_segments[i])) + except (ValueError, TypeError): + _LOGGER.error('Could not parse rule based segment.') + _LOGGER.debug("Raw rule based segment that failed parsing attempt: %s", raw_rbs_segments[i]) + to_return[segment_names[i]] = rbs_segment + except RedisAdapterException: + _LOGGER.error('Error fetching rule based segments from storage') + _LOGGER.debug('Error: ', exc_info=True) + return to_return + class RedisRuleBasedSegmentsStorageAsync(RuleBasedSegmentsStorage): """Redis-based storage for rule based segments.""" @@ -246,6 +275,35 @@ async def get_large_segment_names(self): """ pass + async def fetch_many(self, segment_names): + """ + Retrieve rule based segment. + + :param segment_names: Names of the rule based segments to fetch. + :type segment_names: list(str) + + :return: A dict with rule based segment objects parsed from redis. 
+ :rtype: dict(segment_name, splitio.models.rule_based_segment.RuleBasedSegment) + """ + to_return = dict() + try: + keys = [self._get_key(segment_name) for segment_name in segment_names] + raw_rbs_segments = await self._redis.mget(keys) + _LOGGER.debug("Fetchting rule based segment [%s] from redis" % segment_names) + _LOGGER.debug(raw_rbs_segments) + for i in range(len(raw_rbs_segments)): + rbs_segment = None + try: + rbs_segment = rule_based_segments.from_raw(json.loads(raw_rbs_segments[i])) + except (ValueError, TypeError): + _LOGGER.error('Could not parse rule based segment.') + _LOGGER.debug("Raw rule based segment that failed parsing attempt: %s", raw_rbs_segments[i]) + to_return[segment_names[i]] = rbs_segment + except RedisAdapterException: + _LOGGER.error('Error fetching rule based segments from storage') + _LOGGER.debug('Error: ', exc_info=True) + return to_return + class RedisSplitStorageBase(SplitStorage): """Redis-based storage base for feature flags.""" diff --git a/tests/client/test_client.py b/tests/client/test_client.py index 526b7347..49b6ba7a 100644 --- a/tests/client/test_client.py +++ b/tests/client/test_client.py @@ -1054,7 +1054,7 @@ def test_telemetry_record_treatment_exception(self, mocker): split_storage = InMemorySplitStorage() split_storage.update([from_raw(splits_json['splitChange1_1']['ff']['d'][0])], [], -1) segment_storage = mocker.Mock(spec=SegmentStorage) - rb_segment_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) + rb_segment_storage = InMemoryRuleBasedSegmentStorage() impression_storage = mocker.Mock(spec=ImpressionStorage) event_storage = mocker.Mock(spec=EventStorage) destroyed_property = mocker.PropertyMock() diff --git a/tests/client/test_input_validator.py b/tests/client/test_input_validator.py index 81b1c06b..2f15d038 100644 --- a/tests/client/test_input_validator.py +++ b/tests/client/test_input_validator.py @@ -8,7 +8,7 @@ from splitio.client.key import Key from splitio.storage import SplitStorage, EventStorage, 
ImpressionStorage, SegmentStorage, RuleBasedSegmentsStorage from splitio.storage.inmemmory import InMemoryTelemetryStorage, InMemoryTelemetryStorageAsync, \ - InMemorySplitStorage, InMemorySplitStorageAsync + InMemorySplitStorage, InMemorySplitStorageAsync, InMemoryRuleBasedSegmentStorage, InMemoryRuleBasedSegmentStorageAsync from splitio.models.splits import Split from splitio.client import input_validator from splitio.recorder.recorder import StandardRecorder, StandardRecorderAsync @@ -30,6 +30,8 @@ def test_get_treatment(self, mocker): type(split_mock).conditions = conditions_mock storage_mock = mocker.Mock(spec=SplitStorage) storage_mock.fetch_many.return_value = {'some_feature': split_mock} + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) + rbs_storage.fetch_many.return_value = {} impmanager = mocker.Mock(spec=ImpressionManager) telemetry_storage = InMemoryTelemetryStorage() @@ -40,7 +42,7 @@ def test_get_treatment(self, mocker): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), - 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), + 'rule_based_segments': rbs_storage, 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -268,6 +270,8 @@ def _configs(treatment): split_mock.get_configurations_for.side_effect = _configs storage_mock = mocker.Mock(spec=SplitStorage) storage_mock.fetch_many.return_value = {'some_feature': split_mock} + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) + rbs_storage.fetch_many.return_value = {} impmanager = mocker.Mock(spec=ImpressionManager) telemetry_storage = InMemoryTelemetryStorage() @@ -278,7 +282,7 @@ def _configs(treatment): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), - 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), + 'rule_based_segments': rbs_storage, 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -819,6 
+823,9 @@ def test_get_treatments(self, mocker): storage_mock.fetch_many.return_value = { 'some_feature': split_mock } + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) + rbs_storage.fetch_many.return_value = {} + impmanager = mocker.Mock(spec=ImpressionManager) telemetry_storage = InMemoryTelemetryStorage() telemetry_producer = TelemetryStorageProducer(telemetry_storage) @@ -828,7 +835,7 @@ def test_get_treatments(self, mocker): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), - 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), + 'rule_based_segments': rbs_storage, 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -963,6 +970,8 @@ def test_get_treatments_with_config(self, mocker): storage_mock.fetch_many.return_value = { 'some_feature': split_mock } + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) + rbs_storage.fetch_many.return_value = {} impmanager = mocker.Mock(spec=ImpressionManager) telemetry_storage = InMemoryTelemetryStorage() @@ -973,7 +982,7 @@ def test_get_treatments_with_config(self, mocker): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), - 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), + 'rule_based_segments': rbs_storage, 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -1108,6 +1117,8 @@ def test_get_treatments_by_flag_set(self, mocker): storage_mock.fetch_many.return_value = { 'some_feature': split_mock } + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) + rbs_storage.fetch_many.return_value = {} storage_mock.get_feature_flags_by_sets.return_value = ['some_feature'] impmanager = mocker.Mock(spec=ImpressionManager) telemetry_storage = InMemoryTelemetryStorage() @@ -1118,7 +1129,7 @@ def test_get_treatments_by_flag_set(self, mocker): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), - 
'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), + 'rule_based_segments': rbs_storage, 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -1224,6 +1235,8 @@ def test_get_treatments_by_flag_sets(self, mocker): storage_mock.fetch_many.return_value = { 'some_feature': split_mock } + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) + rbs_storage.fetch_many.return_value = {} storage_mock.get_feature_flags_by_sets.return_value = ['some_feature'] impmanager = mocker.Mock(spec=ImpressionManager) telemetry_storage = InMemoryTelemetryStorage() @@ -1234,7 +1247,7 @@ def test_get_treatments_by_flag_sets(self, mocker): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), - 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), + 'rule_based_segments': rbs_storage, 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -1349,6 +1362,9 @@ def _configs(treatment): storage_mock.fetch_many.return_value = { 'some_feature': split_mock } + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) + rbs_storage.fetch_many.return_value = {} + storage_mock.get_feature_flags_by_sets.return_value = ['some_feature'] impmanager = mocker.Mock(spec=ImpressionManager) @@ -1360,7 +1376,7 @@ def _configs(treatment): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), - 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), + 'rule_based_segments': rbs_storage, 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -1469,6 +1485,9 @@ def _configs(treatment): storage_mock.fetch_many.return_value = { 'some_feature': split_mock } + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) + rbs_storage.fetch_many.return_value = {} + storage_mock.get_feature_flags_by_sets.return_value = ['some_feature'] impmanager = mocker.Mock(spec=ImpressionManager) @@ 
-1480,7 +1499,7 @@ def _configs(treatment): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), - 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), + 'rule_based_segments': rbs_storage, 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -1619,6 +1638,10 @@ async def fetch_many(*_): 'some_feature': split_mock } storage_mock.fetch_many = fetch_many + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorageAsync) + async def fetch_many_rbs(*_): + return {} + rbs_storage.fetch_many = fetch_many_rbs async def get_change_number(*_): return 1 @@ -1633,7 +1656,7 @@ async def get_change_number(*_): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), - 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), + 'rule_based_segments': rbs_storage, 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -1876,6 +1899,10 @@ async def fetch_many(*_): 'some_feature': split_mock } storage_mock.fetch_many = fetch_many + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorageAsync) + async def fetch_many_rbs(*_): + return {} + rbs_storage.fetch_many = fetch_many_rbs async def get_change_number(*_): return 1 @@ -1890,7 +1917,7 @@ async def get_change_number(*_): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), - 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), + 'rule_based_segments': rbs_storage, 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -2409,6 +2436,10 @@ async def fetch_many(*_): 'some': split_mock, } storage_mock.fetch_many = fetch_many + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorageAsync) + async def fetch_many_rbs(*_): + return {} + rbs_storage.fetch_many = fetch_many_rbs impmanager = mocker.Mock(spec=ImpressionManager) telemetry_storage = await InMemoryTelemetryStorageAsync.create() @@ -2419,7 
+2450,7 @@ async def fetch_many(*_): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), - 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), + 'rule_based_segments': rbs_storage, 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -2568,6 +2599,10 @@ async def fetch_many(*_): 'some_feature': split_mock } storage_mock.fetch_many = fetch_many + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorageAsync) + async def fetch_many_rbs(*_): + return {} + rbs_storage.fetch_many = fetch_many_rbs impmanager = mocker.Mock(spec=ImpressionManager) telemetry_storage = await InMemoryTelemetryStorageAsync.create() @@ -2578,7 +2613,7 @@ async def fetch_many(*_): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), - 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), + 'rule_based_segments': rbs_storage, 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -2730,6 +2765,10 @@ async def fetch_many(*_): async def get_feature_flags_by_sets(*_): return ['some_feature'] storage_mock.get_feature_flags_by_sets = get_feature_flags_by_sets + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorageAsync) + async def fetch_many_rbs(*_): + return {} + rbs_storage.fetch_many = fetch_many_rbs impmanager = mocker.Mock(spec=ImpressionManager) telemetry_storage = await InMemoryTelemetryStorageAsync.create() @@ -2740,7 +2779,7 @@ async def get_feature_flags_by_sets(*_): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), - 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), + 'rule_based_segments': rbs_storage, 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -2867,6 +2906,11 @@ async def fetch_many(*_): 'some': split_mock, } storage_mock.fetch_many = fetch_many + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorageAsync) + 
async def fetch_many_rbs(*_): + return {} + rbs_storage.fetch_many = fetch_many_rbs + async def get_feature_flags_by_sets(*_): return ['some_feature'] storage_mock.get_feature_flags_by_sets = get_feature_flags_by_sets @@ -2880,7 +2924,7 @@ async def get_feature_flags_by_sets(*_): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), - 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), + 'rule_based_segments': rbs_storage, 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -3017,6 +3061,10 @@ async def fetch_many(*_): 'some': split_mock, } storage_mock.fetch_many = fetch_many + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorageAsync) + async def fetch_many_rbs(*_): + return {} + rbs_storage.fetch_many = fetch_many_rbs async def get_feature_flags_by_sets(*_): return ['some_feature'] storage_mock.get_feature_flags_by_sets = get_feature_flags_by_sets @@ -3030,7 +3078,7 @@ async def get_feature_flags_by_sets(*_): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), - 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), + 'rule_based_segments': rbs_storage, 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': mocker.Mock(spec=EventStorage), }, @@ -3160,6 +3208,11 @@ async def fetch_many(*_): 'some': split_mock, } storage_mock.fetch_many = fetch_many + rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorageAsync) + async def fetch_many_rbs(*_): + return {} + rbs_storage.fetch_many = fetch_many_rbs + async def get_feature_flags_by_sets(*_): return ['some_feature'] storage_mock.get_feature_flags_by_sets = get_feature_flags_by_sets @@ -3173,7 +3226,7 @@ async def get_feature_flags_by_sets(*_): { 'splits': storage_mock, 'segments': mocker.Mock(spec=SegmentStorage), - 'rule_based_segments': mocker.Mock(spec=RuleBasedSegmentsStorage), + 'rule_based_segments': rbs_storage, 'impressions': mocker.Mock(spec=ImpressionStorage), 'events': 
mocker.Mock(spec=EventStorage), }, diff --git a/tests/engine/files/rule_base_segments.json b/tests/engine/files/rule_base_segments.json index 0ab3495b..70b64b32 100644 --- a/tests/engine/files/rule_base_segments.json +++ b/tests/engine/files/rule_base_segments.json @@ -8,6 +8,7 @@ "excluded":{"keys":["mauro@split.io","gaston@split.io"],"segments":[]}, "conditions": [ { + "conditionType": "WHITELIST", "matcherGroup": { "combiner": "AND", "matchers": [ diff --git a/tests/engine/files/rule_base_segments2.json b/tests/engine/files/rule_base_segments2.json index fa2b006b..d5c28829 100644 --- a/tests/engine/files/rule_base_segments2.json +++ b/tests/engine/files/rule_base_segments2.json @@ -19,11 +19,11 @@ "trafficType": "user", "attribute": "email" }, - "matcherType": "ENDS_WITH", + "matcherType": "START_WITH", "negate": false, "whitelistMatcherData": { "whitelist": [ - "@split.io" + "bilal" ] } } diff --git a/tests/engine/test_evaluator.py b/tests/engine/test_evaluator.py index fe082ce2..08e89371 100644 --- a/tests/engine/test_evaluator.py +++ b/tests/engine/test_evaluator.py @@ -118,7 +118,7 @@ def _build_evaluator_with_mocks(self, mocker): e = evaluator.Evaluator(splitter_mock) evaluator._LOGGER = logger_mock return e - + def test_evaluate_treatment_killed_split(self, mocker): """Test that a killed split returns the default treatment.""" e = self._build_evaluator_with_mocks(mocker) @@ -127,7 +127,8 @@ def test_evaluate_treatment_killed_split(self, mocker): mocked_split.killed = True mocked_split.change_number = 123 mocked_split.get_configurations_for.return_value = '{"some_property": 123}' - ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={}, segment_rbs_conditions={}, excluded_rbs_segments={}) + + ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), rbs_segments={}) result = e.eval_with_context('some_key', 'some_bucketing_key', 'some', {}, ctx) assert result['treatment'] == 'off' 
assert result['configurations'] == '{"some_property": 123}' @@ -145,7 +146,7 @@ def test_evaluate_treatment_ok(self, mocker): mocked_split.killed = False mocked_split.change_number = 123 mocked_split.get_configurations_for.return_value = '{"some_property": 123}' - ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={}, segment_rbs_conditions={}, excluded_rbs_segments={}) + ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), rbs_segments={}) result = e.eval_with_context('some_key', 'some_bucketing_key', 'some', {}, ctx) assert result['treatment'] == 'on' assert result['configurations'] == '{"some_property": 123}' @@ -164,7 +165,7 @@ def test_evaluate_treatment_ok_no_config(self, mocker): mocked_split.killed = False mocked_split.change_number = 123 mocked_split.get_configurations_for.return_value = None - ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={}, segment_rbs_conditions={}, excluded_rbs_segments={}) + ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), rbs_segments={}) result = e.eval_with_context('some_key', 'some_bucketing_key', 'some', {}, ctx) assert result['treatment'] == 'on' assert result['configurations'] == None @@ -191,7 +192,7 @@ def test_evaluate_treatments(self, mocker): mocked_split2.change_number = 123 mocked_split2.get_configurations_for.return_value = None - ctx = EvaluationContext(flags={'feature2': mocked_split, 'feature4': mocked_split2}, segment_memberships=set(), segment_rbs_memberships={}, segment_rbs_conditions={}, excluded_rbs_segments={}) + ctx = EvaluationContext(flags={'feature2': mocked_split, 'feature4': mocked_split2}, segment_memberships=set(), rbs_segments={}) results = e.eval_many_with_context('some_key', 'some_bucketing_key', ['feature2', 'feature4'], {}, ctx) result = results['feature4'] assert result['configurations'] == None @@ -214,7 +215,7 @@ def 
test_get_gtreatment_for_split_no_condition_matches(self, mocker): mocked_split.change_number = '123' mocked_split.conditions = [] mocked_split.get_configurations_for = None - ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={}, segment_rbs_conditions={}, excluded_rbs_segments={}) + ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), rbs_segments={}) assert e._treatment_for_flag(mocked_split, 'some_key', 'some_bucketing', {}, ctx) == ( 'off', Label.NO_CONDITION_MATCHED @@ -231,7 +232,7 @@ def test_get_gtreatment_for_split_non_rollout(self, mocker): mocked_split = mocker.Mock(spec=Split) mocked_split.killed = False mocked_split.conditions = [mocked_condition_1] - treatment, label = e._treatment_for_flag(mocked_split, 'some_key', 'some_bucketing', {}, EvaluationContext(None, None, None, None, None)) + treatment, label = e._treatment_for_flag(mocked_split, 'some_key', 'some_bucketing', {}, EvaluationContext(None, None, None)) assert treatment == 'on' assert label == 'some_label' @@ -240,14 +241,11 @@ def test_evaluate_treatment_with_rule_based_segment(self, mocker): e = evaluator.Evaluator(splitters.Splitter()) mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False) - ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), segment_rbs_memberships={'sample_rule_based_segment': False}, segment_rbs_conditions={'sample_rule_based_segment': rule_based_segments.from_raw(rbs_raw).conditions}, excluded_rbs_segments={}) + + ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), rbs_segments={'sample_rule_based_segment': rule_based_segments.from_raw(rbs_raw)}) result = e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx) assert result['treatment'] == 'on' - - ctx = EvaluationContext(flags={'some': mocked_split}, 
segment_memberships=set(), segment_rbs_memberships={'sample_rule_based_segment': True}, segment_rbs_conditions={'sample_rule_based_segment': []}, excluded_rbs_segments={}) - result = e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx) - assert result['treatment'] == 'off' - + def test_evaluate_treatment_with_rbs_in_condition(self): e = evaluator.Evaluator(splitters.Splitter()) splits_storage = InMemorySplitStorage() @@ -267,10 +265,10 @@ def test_evaluate_treatment_with_rbs_in_condition(self): ctx = evaluation_facctory.context_for('bilal@split.io', ['some']) assert e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx)['treatment'] == "on" + ctx = evaluation_facctory.context_for('mauro@split.io', ['some']) assert e.eval_with_context('mauro@split.io', 'mauro@split.io', 'some', {'email': 'mauro@split.io'}, ctx)['treatment'] == "off" - - + def test_using_segment_in_excluded(self): rbs_segments = os.path.join(os.path.dirname(__file__), 'files', 'rule_base_segments3.json') with open(rbs_segments, 'r') as flo: @@ -312,10 +310,10 @@ def test_using_rbs_in_excluded(self): splits_storage.update([mocked_split], [], 12) ctx = evaluation_facctory.context_for('bilal@split.io', ['some']) - assert e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx)['treatment'] == "on" - ctx = evaluation_facctory.context_for('mauro@split.io', ['some']) - assert e.eval_with_context('mauro@split.io', 'mauro@split.io', 'some', {'email': 'mauro@split.io'}, ctx)['treatment'] == "on" - + assert e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx)['treatment'] == "off" + ctx = evaluation_facctory.context_for('bilal', ['some']) + assert e.eval_with_context('bilal', 'bilal', 'some', {'email': 'bilal'}, ctx)['treatment'] == "on" + @pytest.mark.asyncio async def test_evaluate_treatment_with_rbs_in_condition_async(self): e = 
evaluator.Evaluator(splitters.Splitter()) @@ -382,9 +380,9 @@ async def test_using_rbs_in_excluded_async(self): await splits_storage.update([mocked_split], [], 12) ctx = await evaluation_facctory.context_for('bilal@split.io', ['some']) - assert e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx)['treatment'] == "on" - ctx = await evaluation_facctory.context_for('mauro@split.io', ['some']) - assert e.eval_with_context('mauro@split.io', 'mauro@split.io', 'some', {'email': 'mauro@split.io'}, ctx)['treatment'] == "on" + assert e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx)['treatment'] == "off" + ctx = await evaluation_facctory.context_for('bilal', ['some']) + assert e.eval_with_context('bilal', 'bilal', 'some', {'email': 'bilal'}, ctx)['treatment'] == "on" class EvaluationDataFactoryTests(object): """Test evaluation factory class.""" @@ -417,14 +415,12 @@ def test_get_context(self): eval_factory = EvaluationDataFactory(flag_storage, segment_storage, rbs_segment_storage) ec = eval_factory.context_for('bilal@split.io', ['some']) - assert ec.segment_rbs_conditions == {'sample_rule_based_segment': rbs.conditions} - assert ec.segment_rbs_memberships == {'sample_rule_based_segment': False} + assert ec.rbs_segments == {'sample_rule_based_segment': rbs} assert ec.segment_memberships == {"employees": False} segment_storage.update("employees", {"mauro@split.io"}, {}, 1234) ec = eval_factory.context_for('mauro@split.io', ['some']) - assert ec.segment_rbs_conditions == {} - assert ec.segment_rbs_memberships == {'sample_rule_based_segment': True} + assert ec.rbs_segments == {'sample_rule_based_segment': rbs} assert ec.segment_memberships == {"employees": True} class EvaluationDataFactoryAsyncTests(object): @@ -459,12 +455,10 @@ async def test_get_context(self): eval_factory = AsyncEvaluationDataFactory(flag_storage, segment_storage, rbs_segment_storage) ec = await 
eval_factory.context_for('bilal@split.io', ['some']) - assert ec.segment_rbs_conditions == {'sample_rule_based_segment': rbs.conditions} - assert ec.segment_rbs_memberships == {'sample_rule_based_segment': False} + assert ec.rbs_segments == {'sample_rule_based_segment': rbs} assert ec.segment_memberships == {"employees": False} await segment_storage.update("employees", {"mauro@split.io"}, {}, 1234) ec = await eval_factory.context_for('mauro@split.io', ['some']) - assert ec.segment_rbs_conditions == {} - assert ec.segment_rbs_memberships == {'sample_rule_based_segment': True} + assert ec.rbs_segments == {'sample_rule_based_segment': rbs} assert ec.segment_memberships == {"employees": True} diff --git a/tests/integration/test_client_e2e.py b/tests/integration/test_client_e2e.py index 140968ce..f16352e3 100644 --- a/tests/integration/test_client_e2e.py +++ b/tests/integration/test_client_e2e.py @@ -4343,7 +4343,7 @@ async def clear_cache(self): redis_client = await build_async(DEFAULT_CONFIG.copy()) for key in keys_to_delete: await redis_client.delete(key) - + async def _validate_last_impressions_async(client, *to_validate): """Validate the last N impressions are present disregarding the order.""" imp_storage = client._factory._get_storage('impressions') diff --git a/tests/models/grammar/test_matchers.py b/tests/models/grammar/test_matchers.py index 12e4bda8..680a8cc7 100644 --- a/tests/models/grammar/test_matchers.py +++ b/tests/models/grammar/test_matchers.py @@ -405,9 +405,9 @@ def test_matcher_behaviour(self, mocker): matcher = matchers.UserDefinedSegmentMatcher(self.raw) # Test that if the key if the storage wrapper finds the key in the segment, it matches. 
- assert matcher.evaluate('some_key', {}, {'evaluator': None, 'ec': EvaluationContext([],{'some_segment': True}, {}, {}, {})}) is True + assert matcher.evaluate('some_key', {}, {'evaluator': None, 'ec': EvaluationContext([],{'some_segment': True}, {})}) is True # Test that if the key if the storage wrapper doesn't find the key in the segment, it fails. - assert matcher.evaluate('some_key', {}, {'evaluator': None, 'ec': EvaluationContext([], {'some_segment': False}, {}, {}, {})}) is False + assert matcher.evaluate('some_key', {}, {'evaluator': None, 'ec': EvaluationContext([], {'some_segment': False}, {})}) is False def test_to_json(self): """Test that the object serializes to JSON properly.""" @@ -1130,11 +1130,9 @@ def test_matcher_behaviour(self, mocker): rbs = rule_based_segments.from_raw(data["rbs"]["d"][0]) matcher = matchers.RuleBasedSegmentMatcher(self.raw) ec ={'ec': EvaluationContext( - {}, {}, - {}, - {"sample_rule_based_segment": rbs.conditions}, - {} + {"segment1": False}, + {"sample_rule_based_segment": rbs} )} assert matcher._match(None, context=ec) is False assert matcher._match('bilal@split.io', context=ec) is False diff --git a/tests/storage/test_redis.py b/tests/storage/test_redis.py index 04ddfc60..4537998c 100644 --- a/tests/storage/test_redis.py +++ b/tests/storage/test_redis.py @@ -1289,6 +1289,25 @@ def test_contains(self, mocker): assert not storage.contains(['segment1', 'segment4']) assert storage.contains(['segment1']) assert not storage.contains(['segment4', 'segment5']) + + def test_fetch_many(self, mocker): + """Test retrieving a list of passed splits.""" + adapter = mocker.Mock(spec=RedisAdapter) + storage = RedisRuleBasedSegmentsStorage(adapter) + from_raw = mocker.Mock() + mocker.patch('splitio.storage.redis.rule_based_segments.from_raw', new=from_raw) + + adapter.mget.return_value = ['{"name": "rbs1"}', '{"name": "rbs2"}', None] + + result = storage.fetch_many(['rbs1', 'rbs2', 'rbs3']) + assert len(result) == 3 + + assert 
mocker.call({'name': 'rbs1'}) in from_raw.mock_calls + assert mocker.call({'name': 'rbs2'}) in from_raw.mock_calls + + assert result['rbs1'] is not None + assert result['rbs2'] is not None + assert 'rbs3' in result class RedisRuleBasedSegmentStorageAsyncTests(object): """Redis rule based segment storage test cases.""" @@ -1391,3 +1410,25 @@ async def keys(sel, key): assert not await storage.contains(['segment1', 'segment4']) assert await storage.contains(['segment1']) assert not await storage.contains(['segment4', 'segment5']) + + @pytest.mark.asyncio + async def test_fetch_many(self, mocker): + """Test retrieving a list of passed splits.""" + adapter = mocker.Mock(spec=RedisAdapter) + storage = RedisRuleBasedSegmentsStorageAsync(adapter) + from_raw = mocker.Mock() + mocker.patch('splitio.storage.redis.rule_based_segments.from_raw', new=from_raw) + async def mget(*_): + return ['{"name": "rbs1"}', '{"name": "rbs2"}', None] + adapter.mget = mget + + result = await storage.fetch_many(['rbs1', 'rbs2', 'rbs3']) + assert len(result) == 3 + + assert mocker.call({'name': 'rbs1'}) in from_raw.mock_calls + assert mocker.call({'name': 'rbs2'}) in from_raw.mock_calls + + assert result['rbs1'] is not None + assert result['rbs2'] is not None + assert 'rbs3' in result + From 6fccf996f6722bdc6dbfa69cf215d04433fce78e Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Mon, 12 May 2025 22:10:01 -0700 Subject: [PATCH 27/56] updated test --- tests/engine/files/rule_base_segments2.json | 4 ++++ tests/engine/test_evaluator.py | 2 ++ 2 files changed, 6 insertions(+) diff --git a/tests/engine/files/rule_base_segments2.json b/tests/engine/files/rule_base_segments2.json index d5c28829..fbc40d51 100644 --- a/tests/engine/files/rule_base_segments2.json +++ b/tests/engine/files/rule_base_segments2.json @@ -37,6 +37,10 @@ "name": "no_excludes", "status": "ACTIVE", "trafficTypeName": "user", + "excluded":{ + "keys":["bilal2"], + "segments":[] + }, "conditions": [ { "matcherGroup": { diff 
--git a/tests/engine/test_evaluator.py b/tests/engine/test_evaluator.py index 08e89371..6da8e3b5 100644 --- a/tests/engine/test_evaluator.py +++ b/tests/engine/test_evaluator.py @@ -313,6 +313,8 @@ def test_using_rbs_in_excluded(self): assert e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx)['treatment'] == "off" ctx = evaluation_facctory.context_for('bilal', ['some']) assert e.eval_with_context('bilal', 'bilal', 'some', {'email': 'bilal'}, ctx)['treatment'] == "on" + ctx = evaluation_facctory.context_for('bilal2', ['some']) + assert e.eval_with_context('bilal2', 'bilal2', 'some', {'email': 'bilal2'}, ctx)['treatment'] == "off" @pytest.mark.asyncio async def test_evaluate_treatment_with_rbs_in_condition_async(self): From 98a685290df6f02b740ba70dc550f7ac33c32851 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Mon, 12 May 2025 22:12:59 -0700 Subject: [PATCH 28/56] polish --- tests/engine/test_evaluator.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/engine/test_evaluator.py b/tests/engine/test_evaluator.py index 6da8e3b5..8bfa27c6 100644 --- a/tests/engine/test_evaluator.py +++ b/tests/engine/test_evaluator.py @@ -385,6 +385,8 @@ async def test_using_rbs_in_excluded_async(self): assert e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx)['treatment'] == "off" ctx = await evaluation_facctory.context_for('bilal', ['some']) assert e.eval_with_context('bilal', 'bilal', 'some', {'email': 'bilal'}, ctx)['treatment'] == "on" + ctx = await evaluation_facctory.context_for('bilal2', ['some']) + assert e.eval_with_context('bilal2', 'bilal2', 'some', {'email': 'bilal2'}, ctx)['treatment'] == "off" class EvaluationDataFactoryTests(object): """Test evaluation factory class.""" From 333919c33a72ad8b4b1bebe28cb01dd3a5ccf9c9 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Wed, 14 May 2025 10:52:09 -0700 Subject: [PATCH 29/56] fix matcher and test --- 
splitio/models/grammar/matchers/rule_based_segment.py | 2 +- tests/engine/files/rule_base_segments2.json | 2 +- tests/engine/test_evaluator.py | 8 ++++---- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/splitio/models/grammar/matchers/rule_based_segment.py b/splitio/models/grammar/matchers/rule_based_segment.py index 3e12a348..5d4a9a09 100644 --- a/splitio/models/grammar/matchers/rule_based_segment.py +++ b/splitio/models/grammar/matchers/rule_based_segment.py @@ -63,7 +63,7 @@ def _match_dep_rb_segments(self, excluded_rb_segments, key, attributes, context) else: excluded_segment = context['ec'].rbs_segments.get(excluded_rb_segment.name) if key in excluded_segment.excluded.get_excluded_keys(): - return True + return False if self._match_dep_rb_segments(excluded_segment.excluded.get_excluded_segments(), key, attributes, context): return True diff --git a/tests/engine/files/rule_base_segments2.json b/tests/engine/files/rule_base_segments2.json index fbc40d51..ee356fd8 100644 --- a/tests/engine/files/rule_base_segments2.json +++ b/tests/engine/files/rule_base_segments2.json @@ -38,7 +38,7 @@ "status": "ACTIVE", "trafficTypeName": "user", "excluded":{ - "keys":["bilal2"], + "keys":["bilal2@split.io"], "segments":[] }, "conditions": [ diff --git a/tests/engine/test_evaluator.py b/tests/engine/test_evaluator.py index 8bfa27c6..a2937126 100644 --- a/tests/engine/test_evaluator.py +++ b/tests/engine/test_evaluator.py @@ -313,8 +313,8 @@ def test_using_rbs_in_excluded(self): assert e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx)['treatment'] == "off" ctx = evaluation_facctory.context_for('bilal', ['some']) assert e.eval_with_context('bilal', 'bilal', 'some', {'email': 'bilal'}, ctx)['treatment'] == "on" - ctx = evaluation_facctory.context_for('bilal2', ['some']) - assert e.eval_with_context('bilal2', 'bilal2', 'some', {'email': 'bilal2'}, ctx)['treatment'] == "off" + ctx = 
evaluation_facctory.context_for('bilal2@split.io', ['some']) + assert e.eval_with_context('bilal2@split.io', 'bilal2@split.io', 'some', {'email': 'bilal2@split.io'}, ctx)['treatment'] == "on" @pytest.mark.asyncio async def test_evaluate_treatment_with_rbs_in_condition_async(self): @@ -385,8 +385,8 @@ async def test_using_rbs_in_excluded_async(self): assert e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx)['treatment'] == "off" ctx = await evaluation_facctory.context_for('bilal', ['some']) assert e.eval_with_context('bilal', 'bilal', 'some', {'email': 'bilal'}, ctx)['treatment'] == "on" - ctx = await evaluation_facctory.context_for('bilal2', ['some']) - assert e.eval_with_context('bilal2', 'bilal2', 'some', {'email': 'bilal2'}, ctx)['treatment'] == "off" + ctx = await evaluation_facctory.context_for('bilal2@split.io', ['some']) + assert e.eval_with_context('bilal2@split.io', 'bilal2@split.io', 'some', {'email': 'bilal2@split.io'}, ctx)['treatment'] == "on" class EvaluationDataFactoryTests(object): """Test evaluation factory class.""" From 1bd96ab45508a77390e3bfba42923185693a3e4c Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany <41021307+chillaq@users.noreply.github.com> Date: Wed, 14 May 2025 15:26:03 -0700 Subject: [PATCH 30/56] Update splitio/models/grammar/matchers/rule_based_segment.py Co-authored-by: Mauro Sanz <51236193+sanzmauro@users.noreply.github.com> --- splitio/models/grammar/matchers/rule_based_segment.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/splitio/models/grammar/matchers/rule_based_segment.py b/splitio/models/grammar/matchers/rule_based_segment.py index 5d4a9a09..06baf4b2 100644 --- a/splitio/models/grammar/matchers/rule_based_segment.py +++ b/splitio/models/grammar/matchers/rule_based_segment.py @@ -68,7 +68,4 @@ def _match_dep_rb_segments(self, excluded_rb_segments, key, attributes, context) if 
self._match_dep_rb_segments(excluded_segment.excluded.get_excluded_segments(), key, attributes, context): return True - if self._match_conditions(excluded_segment.conditions, key, attributes, context): - return True - - return False + return self._match_conditions(excluded_segment.conditions, key, attributes, context) From ba4e34775df695797d738d5b41ecd6bcd2c37b33 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Thu, 15 May 2025 13:40:05 -0700 Subject: [PATCH 31/56] Fix initial segment fetch --- splitio/client/factory.py | 2 +- splitio/storage/inmemmory.py | 2 +- splitio/sync/segment.py | 7 ++++++- splitio/sync/split.py | 2 +- splitio/util/storage_helper.py | 15 +++++++++++++++ 5 files changed, 24 insertions(+), 4 deletions(-) diff --git a/splitio/client/factory.py b/splitio/client/factory.py index 7c56819f..0d2fdbb0 100644 --- a/splitio/client/factory.py +++ b/splitio/client/factory.py @@ -564,7 +564,7 @@ def _build_in_memory_factory(api_key, cfg, sdk_url=None, events_url=None, # pyl synchronizers = SplitSynchronizers( SplitSynchronizer(apis['splits'], storages['splits'], storages['rule_based_segments']), - SegmentSynchronizer(apis['segments'], storages['splits'], storages['segments']), + SegmentSynchronizer(apis['segments'], storages['splits'], storages['segments'], storages['rule_based_segments']), ImpressionSynchronizer(apis['impressions'], storages['impressions'], cfg['impressionsBulkSize']), EventSynchronizer(apis['events'], storages['events'], cfg['eventsBulkSize']), diff --git a/splitio/storage/inmemmory.py b/splitio/storage/inmemmory.py index c7c1a7bf..e1740b72 100644 --- a/splitio/storage/inmemmory.py +++ b/splitio/storage/inmemmory.py @@ -233,7 +233,7 @@ def contains(self, segment_names): def fetch_many(self, segment_names): return {rb_segment_name: self.get(rb_segment_name) for rb_segment_name in segment_names} - + class InMemoryRuleBasedSegmentStorageAsync(RuleBasedSegmentsStorage): """InMemory implementation of a feature flag storage base.""" def 
__init__(self): diff --git a/splitio/sync/segment.py b/splitio/sync/segment.py index 59d9fad8..2550b586 100644 --- a/splitio/sync/segment.py +++ b/splitio/sync/segment.py @@ -10,6 +10,7 @@ from splitio.util.backoff import Backoff from splitio.optional.loaders import asyncio, aiofiles from splitio.sync import util +from splitio.util.storage_helper import get_standard_segment_names_in_rbs_storage from splitio.optional.loaders import asyncio _LOGGER = logging.getLogger(__name__) @@ -22,7 +23,7 @@ class SegmentSynchronizer(object): - def __init__(self, segment_api, feature_flag_storage, segment_storage): + def __init__(self, segment_api, feature_flag_storage, segment_storage, rule_based_segment_storage): """ Class constructor. @@ -39,6 +40,7 @@ def __init__(self, segment_api, feature_flag_storage, segment_storage): self._api = segment_api self._feature_flag_storage = feature_flag_storage self._segment_storage = segment_storage + self._rule_based_segment_storage = rule_based_segment_storage self._worker_pool = workerpool.WorkerPool(_MAX_WORKERS, self.synchronize_segment) self._worker_pool.start() self._backoff = Backoff( @@ -182,8 +184,11 @@ def synchronize_segments(self, segment_names = None, dont_wait = False): """ if segment_names is None: segment_names = self._feature_flag_storage.get_segment_names() + segment_names.update(get_standard_segment_names_in_rbs_storage(self._rule_based_segment_storage)) for segment_name in segment_names: + _LOGGER.debug("Adding segment name to sync worker") + _LOGGER.debug(segment_name) self._worker_pool.submit_work(segment_name) if (dont_wait): return True diff --git a/splitio/sync/split.py b/splitio/sync/split.py index dfc58811..1d1722f6 100644 --- a/splitio/sync/split.py +++ b/splitio/sync/split.py @@ -139,7 +139,7 @@ def _fetch_until(self, fetch_options, till=None, rbs_till=None): rbs_segment_list = update_rule_based_segment_storage(self._rule_based_segment_storage, fetched_rule_based_segments, feature_flag_changes.get('rbs')['t'], 
self._api.clear_storage) fetched_feature_flags = [(splits.from_raw(feature_flag)) for feature_flag in feature_flag_changes.get('ff').get('d', [])] - segment_list = update_feature_flag_storage(self._feature_flag_storage, fetched_feature_flags, feature_flag_changes.get('ff')['t'], self._api.clear_storage) + segment_list.update(update_feature_flag_storage(self._feature_flag_storage, fetched_feature_flags, feature_flag_changes.get('ff')['t'], self._api.clear_storage)) segment_list.update(rbs_segment_list) if feature_flag_changes.get('ff')['t'] == feature_flag_changes.get('ff')['s'] and feature_flag_changes.get('rbs')['t'] == feature_flag_changes.get('rbs')['s']: diff --git a/splitio/util/storage_helper.py b/splitio/util/storage_helper.py index ad5d93eb..cd50856b 100644 --- a/splitio/util/storage_helper.py +++ b/splitio/util/storage_helper.py @@ -70,6 +70,21 @@ def update_rule_based_segment_storage(rule_based_segment_storage, rule_based_seg def _get_segment_names(excluded_segments): return [excluded_segment.name for excluded_segment in excluded_segments] +def get_standard_segment_names_in_rbs_storage(rule_based_segment_storage): + """ + Retrieve a list of all standard segments names. + + :return: Set of segment names. 
+ :rtype: Set(str) + """ + segment_list = set() + for rb_segment in rule_based_segment_storage.get_segment_names(): + rb_segment_obj = rule_based_segment_storage.get(rb_segment) + segment_list.update(set(_get_segment_names(rb_segment_obj.excluded.get_excluded_segments()))) + segment_list.update(rb_segment_obj.get_condition_segment_names()) + + return segment_list + async def update_feature_flag_storage_async(feature_flag_storage, feature_flags, change_number, clear_storage=False): """ Update feature flag storage from given list of feature flags while checking the flag set logic From 066b78f24c5051f8a342b9338edc2af2db607f7d Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Thu, 15 May 2025 21:07:57 -0700 Subject: [PATCH 32/56] polish --- splitio/client/factory.py | 2 +- splitio/models/rule_based_segments.py | 8 ++ splitio/sync/segment.py | 10 ++- splitio/util/storage_helper.py | 26 ++++-- tests/sync/test_segments_synchronizer.py | 102 ++++++++++++++++++----- tests/sync/test_synchronizer.py | 21 +++-- tests/tasks/test_segment_sync.py | 56 +++++++++---- tests/util/test_storage_helper.py | 14 +++- 8 files changed, 183 insertions(+), 56 deletions(-) diff --git a/splitio/client/factory.py b/splitio/client/factory.py index 0d2fdbb0..f6070243 100644 --- a/splitio/client/factory.py +++ b/splitio/client/factory.py @@ -693,7 +693,7 @@ async def _build_in_memory_factory_async(api_key, cfg, sdk_url=None, events_url= synchronizers = SplitSynchronizers( SplitSynchronizerAsync(apis['splits'], storages['splits'], storages['rule_based_segments']), - SegmentSynchronizerAsync(apis['segments'], storages['splits'], storages['segments']), + SegmentSynchronizerAsync(apis['segments'], storages['splits'], storages['segments'], storages['rule_based_segments']), ImpressionSynchronizerAsync(apis['impressions'], storages['impressions'], cfg['impressionsBulkSize']), EventSynchronizerAsync(apis['events'], storages['events'], cfg['eventsBulkSize']), diff --git 
a/splitio/models/rule_based_segments.py b/splitio/models/rule_based_segments.py index dd964055..f7bf3f4d 100644 --- a/splitio/models/rule_based_segments.py +++ b/splitio/models/rule_based_segments.py @@ -152,6 +152,14 @@ def get_excluded_segments(self): """Return excluded segments""" return self._segments + def get_excluded_standard_segments(self): + """Return excluded segments""" + to_return = [] + for segment in self._segments: + if segment.type == SegmentType.STANDARD: + to_return.append(segment.name) + return to_return + def to_json(self): """Return a JSON representation of this object.""" return { diff --git a/splitio/sync/segment.py b/splitio/sync/segment.py index 2550b586..a87759e1 100644 --- a/splitio/sync/segment.py +++ b/splitio/sync/segment.py @@ -10,7 +10,7 @@ from splitio.util.backoff import Backoff from splitio.optional.loaders import asyncio, aiofiles from splitio.sync import util -from splitio.util.storage_helper import get_standard_segment_names_in_rbs_storage +from splitio.util.storage_helper import get_standard_segment_names_in_rbs_storage, get_standard_segment_names_in_rbs_storage_async from splitio.optional.loaders import asyncio _LOGGER = logging.getLogger(__name__) @@ -183,7 +183,7 @@ def synchronize_segments(self, segment_names = None, dont_wait = False): :rtype: bool """ if segment_names is None: - segment_names = self._feature_flag_storage.get_segment_names() + segment_names = set(self._feature_flag_storage.get_segment_names()) segment_names.update(get_standard_segment_names_in_rbs_storage(self._rule_based_segment_storage)) for segment_name in segment_names: @@ -209,7 +209,7 @@ def segment_exist_in_storage(self, segment_name): class SegmentSynchronizerAsync(object): - def __init__(self, segment_api, feature_flag_storage, segment_storage): + def __init__(self, segment_api, feature_flag_storage, segment_storage, rule_based_segment_storage): """ Class constructor. 
@@ -226,6 +226,7 @@ def __init__(self, segment_api, feature_flag_storage, segment_storage): self._api = segment_api self._feature_flag_storage = feature_flag_storage self._segment_storage = segment_storage + self._rule_based_segment_storage = rule_based_segment_storage self._worker_pool = workerpool.WorkerPoolAsync(_MAX_WORKERS, self.synchronize_segment) self._worker_pool.start() self._backoff = Backoff( @@ -369,7 +370,8 @@ async def synchronize_segments(self, segment_names = None, dont_wait = False): :rtype: bool """ if segment_names is None: - segment_names = await self._feature_flag_storage.get_segment_names() + segment_names = set(await self._feature_flag_storage.get_segment_names()) + segment_names.update(await get_standard_segment_names_in_rbs_storage_async(self._rule_based_segment_storage)) self._jobs = await self._worker_pool.submit_work(segment_names) if (dont_wait): diff --git a/splitio/util/storage_helper.py b/splitio/util/storage_helper.py index cd50856b..81fdef65 100644 --- a/splitio/util/storage_helper.py +++ b/splitio/util/storage_helper.py @@ -1,6 +1,7 @@ """Storage Helper.""" import logging from splitio.models import splits +from splitio.models import rule_based_segments _LOGGER = logging.getLogger(__name__) @@ -58,7 +59,7 @@ def update_rule_based_segment_storage(rule_based_segment_storage, rule_based_seg for rule_based_segment in rule_based_segments: if rule_based_segment.status == splits.Status.ACTIVE: to_add.append(rule_based_segment) - segment_list.update(set(_get_segment_names(rule_based_segment.excluded.get_excluded_segments()))) + segment_list.update(set(rule_based_segment.excluded.get_excluded_standard_segments())) segment_list.update(rule_based_segment.get_condition_segment_names()) else: if rule_based_segment_storage.get(rule_based_segment.name) is not None: @@ -66,9 +67,6 @@ def update_rule_based_segment_storage(rule_based_segment_storage, rule_based_seg rule_based_segment_storage.update(to_add, to_delete, change_number) return 
segment_list - -def _get_segment_names(excluded_segments): - return [excluded_segment.name for excluded_segment in excluded_segments] def get_standard_segment_names_in_rbs_storage(rule_based_segment_storage): """ @@ -80,7 +78,7 @@ def get_standard_segment_names_in_rbs_storage(rule_based_segment_storage): segment_list = set() for rb_segment in rule_based_segment_storage.get_segment_names(): rb_segment_obj = rule_based_segment_storage.get(rb_segment) - segment_list.update(set(_get_segment_names(rb_segment_obj.excluded.get_excluded_segments()))) + segment_list.update(set(rb_segment_obj.excluded.get_excluded_standard_segments())) segment_list.update(rb_segment_obj.get_condition_segment_names()) return segment_list @@ -139,7 +137,7 @@ async def update_rule_based_segment_storage_async(rule_based_segment_storage, ru for rule_based_segment in rule_based_segments: if rule_based_segment.status == splits.Status.ACTIVE: to_add.append(rule_based_segment) - segment_list.update(set(_get_segment_names(rule_based_segment.excluded.get_excluded_segments()))) + segment_list.update(set(rule_based_segment.excluded.get_excluded_standard_segments())) segment_list.update(rule_based_segment.get_condition_segment_names()) else: if await rule_based_segment_storage.get(rule_based_segment.name) is not None: @@ -148,6 +146,22 @@ async def update_rule_based_segment_storage_async(rule_based_segment_storage, ru await rule_based_segment_storage.update(to_add, to_delete, change_number) return segment_list +async def get_standard_segment_names_in_rbs_storage_async(rule_based_segment_storage): + """ + Retrieve a list of all standard segments names. + + :return: Set of segment names. 
+ :rtype: Set(str) + """ + segment_list = set() + segment_names = await rule_based_segment_storage.get_segment_names() + for rb_segment in segment_names: + rb_segment_obj = await rule_based_segment_storage.get(rb_segment) + segment_list.update(set(rb_segment_obj.excluded.get_excluded_standard_segments())) + segment_list.update(rb_segment_obj.get_condition_segment_names()) + + return segment_list + def get_valid_flag_sets(flag_sets, flag_set_filter): """ Check each flag set in given array, return it if exist in a given config flag set array, if config array is empty return all diff --git a/tests/sync/test_segments_synchronizer.py b/tests/sync/test_segments_synchronizer.py index 5a6ef849..e88db2fa 100644 --- a/tests/sync/test_segments_synchronizer.py +++ b/tests/sync/test_segments_synchronizer.py @@ -5,10 +5,11 @@ from splitio.util.backoff import Backoff from splitio.api import APIException from splitio.api.commons import FetchOptions -from splitio.storage import SplitStorage, SegmentStorage +from splitio.storage import SplitStorage, SegmentStorage, RuleBasedSegmentsStorage from splitio.storage.inmemmory import InMemorySegmentStorage, InMemorySegmentStorageAsync, InMemorySplitStorage, InMemorySplitStorageAsync from splitio.sync.segment import SegmentSynchronizer, SegmentSynchronizerAsync, LocalSegmentSynchronizer, LocalSegmentSynchronizerAsync from splitio.models.segments import Segment +from splitio.models import rule_based_segments from splitio.optional.loaders import aiofiles, asyncio import pytest @@ -23,6 +24,8 @@ def test_synchronize_segments_error(self, mocker): storage = mocker.Mock(spec=SegmentStorage) storage.get_change_number.return_value = -1 + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) + rbs_storage.get_segment_names.return_value = [] api = mocker.Mock() @@ -30,7 +33,7 @@ def run(x): raise APIException("something broke") api.fetch_segment.side_effect = run - segments_synchronizer = SegmentSynchronizer(api, split_storage, storage) + 
segments_synchronizer = SegmentSynchronizer(api, split_storage, storage, rbs_storage) assert not segments_synchronizer.synchronize_segments() def test_synchronize_segments(self, mocker): @@ -38,6 +41,10 @@ def test_synchronize_segments(self, mocker): split_storage = mocker.Mock(spec=SplitStorage) split_storage.get_segment_names.return_value = ['segmentA', 'segmentB', 'segmentC'] + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) + rbs_storage.get_segment_names.return_value = ['rbs'] + rbs_storage.get.return_value = rule_based_segments.from_raw({'name': 'rbs', 'conditions': [], 'trafficTypeName': 'user', 'changeNumber': 123, 'status': 'ACTIVE', 'excluded': {'keys': [], 'segments': [{'type': 'standard', 'name': 'segmentD'}]}}) + # Setup a mocked segment storage whose changenumber returns -1 on first fetch and # 123 afterwards. storage = mocker.Mock(spec=SegmentStorage) @@ -52,10 +59,14 @@ def change_number_mock(segment_name): if segment_name == 'segmentC' and change_number_mock._count_c == 0: change_number_mock._count_c = 1 return -1 + if segment_name == 'segmentD' and change_number_mock._count_d == 0: + change_number_mock._count_d = 1 + return -1 return 123 change_number_mock._count_a = 0 change_number_mock._count_b = 0 change_number_mock._count_c = 0 + change_number_mock._count_d = 0 storage.get_change_number.side_effect = change_number_mock # Setup a mocked segment api to return segments mentioned before. 
@@ -72,27 +83,35 @@ def fetch_segment_mock(segment_name, change_number, fetch_options): fetch_segment_mock._count_c = 1 return {'name': 'segmentC', 'added': ['key7', 'key8', 'key9'], 'removed': [], 'since': -1, 'till': 123} + if segment_name == 'segmentD' and fetch_segment_mock._count_d == 0: + fetch_segment_mock._count_d = 1 + return {'name': 'segmentD', 'added': ['key10'], 'removed': [], + 'since': -1, 'till': 123} return {'added': [], 'removed': [], 'since': 123, 'till': 123} fetch_segment_mock._count_a = 0 fetch_segment_mock._count_b = 0 fetch_segment_mock._count_c = 0 + fetch_segment_mock._count_d = 0 api = mocker.Mock() api.fetch_segment.side_effect = fetch_segment_mock - segments_synchronizer = SegmentSynchronizer(api, split_storage, storage) + segments_synchronizer = SegmentSynchronizer(api, split_storage, storage, rbs_storage) assert segments_synchronizer.synchronize_segments() api_calls = [call for call in api.fetch_segment.mock_calls] + assert mocker.call('segmentA', -1, FetchOptions(True, None, None, None, None)) in api_calls assert mocker.call('segmentB', -1, FetchOptions(True, None, None, None, None)) in api_calls assert mocker.call('segmentC', -1, FetchOptions(True, None, None, None, None)) in api_calls + assert mocker.call('segmentD', -1, FetchOptions(True, None, None, None, None)) in api_calls assert mocker.call('segmentA', 123, FetchOptions(True, None, None, None, None)) in api_calls assert mocker.call('segmentB', 123, FetchOptions(True, None, None, None, None)) in api_calls assert mocker.call('segmentC', 123, FetchOptions(True, None, None, None, None)) in api_calls + assert mocker.call('segmentD', 123, FetchOptions(True, None, None, None, None)) in api_calls segment_put_calls = storage.put.mock_calls - segments_to_validate = set(['segmentA', 'segmentB', 'segmentC']) + segments_to_validate = set(['segmentA', 'segmentB', 'segmentC', 'segmentD']) for call in segment_put_calls: _, positional_args, _ = call segment = positional_args[0] @@ -104,6 
+123,8 @@ def test_synchronize_segment(self, mocker): """Test particular segment update.""" split_storage = mocker.Mock(spec=SplitStorage) storage = mocker.Mock(spec=SegmentStorage) + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) + rbs_storage.get_segment_names.return_value = [] def change_number_mock(segment_name): if change_number_mock._count_a == 0: @@ -124,7 +145,7 @@ def fetch_segment_mock(segment_name, change_number, fetch_options): api = mocker.Mock() api.fetch_segment.side_effect = fetch_segment_mock - segments_synchronizer = SegmentSynchronizer(api, split_storage, storage) + segments_synchronizer = SegmentSynchronizer(api, split_storage, storage, rbs_storage) segments_synchronizer.synchronize_segment('segmentA') api_calls = [call for call in api.fetch_segment.mock_calls] @@ -137,6 +158,8 @@ def test_synchronize_segment_cdn(self, mocker): split_storage = mocker.Mock(spec=SplitStorage) storage = mocker.Mock(spec=SegmentStorage) + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) + rbs_storage.get_segment_names.return_value = [] def change_number_mock(segment_name): change_number_mock._count_a += 1 @@ -170,7 +193,7 @@ def fetch_segment_mock(segment_name, change_number, fetch_options): api = mocker.Mock() api.fetch_segment.side_effect = fetch_segment_mock - segments_synchronizer = SegmentSynchronizer(api, split_storage, storage) + segments_synchronizer = SegmentSynchronizer(api, split_storage, storage, rbs_storage) segments_synchronizer.synchronize_segment('segmentA') assert mocker.call('segmentA', -1, FetchOptions(True, None, None, None, None)) in api.fetch_segment.mock_calls @@ -183,7 +206,7 @@ def fetch_segment_mock(segment_name, change_number, fetch_options): def test_recreate(self, mocker): """Test recreate logic.""" - segments_synchronizer = SegmentSynchronizer(mocker.Mock(), mocker.Mock(), mocker.Mock()) + segments_synchronizer = SegmentSynchronizer(mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock()) current_pool = 
segments_synchronizer._worker_pool segments_synchronizer.recreate() assert segments_synchronizer._worker_pool != current_pool @@ -196,6 +219,11 @@ class SegmentsSynchronizerAsyncTests(object): async def test_synchronize_segments_error(self, mocker): """On error.""" split_storage = mocker.Mock(spec=SplitStorage) + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) + + async def get_segment_names_rbs(): + return [] + rbs_storage.get_segment_names = get_segment_names_rbs async def get_segment_names(): return ['segmentA', 'segmentB', 'segmentC'] @@ -215,7 +243,7 @@ async def run(*args): raise APIException("something broke") api.fetch_segment = run - segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage) + segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage, rbs_storage) assert not await segments_synchronizer.synchronize_segments() await segments_synchronizer.shutdown() @@ -227,6 +255,15 @@ async def get_segment_names(): return ['segmentA', 'segmentB', 'segmentC'] split_storage.get_segment_names = get_segment_names + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) + async def get_segment_names_rbs(): + return ['rbs'] + rbs_storage.get_segment_names = get_segment_names_rbs + + async def get_rbs(segment_name): + return rule_based_segments.from_raw({'name': 'rbs', 'conditions': [], 'trafficTypeName': 'user', 'changeNumber': 123, 'status': 'ACTIVE', 'excluded': {'keys': [], 'segments': [{'type': 'standard', 'name': 'segmentD'}]}}) + rbs_storage.get = get_rbs + # Setup a mocked segment storage whose changenumber returns -1 on first fetch and # 123 afterwards. 
storage = mocker.Mock(spec=SegmentStorage) @@ -241,10 +278,14 @@ async def change_number_mock(segment_name): if segment_name == 'segmentC' and change_number_mock._count_c == 0: change_number_mock._count_c = 1 return -1 + if segment_name == 'segmentD' and change_number_mock._count_d == 0: + change_number_mock._count_d = 1 + return -1 return 123 change_number_mock._count_a = 0 change_number_mock._count_b = 0 change_number_mock._count_c = 0 + change_number_mock._count_d = 0 storage.get_change_number = change_number_mock self.segment_put = [] @@ -276,25 +317,36 @@ async def fetch_segment_mock(segment_name, change_number, fetch_options): fetch_segment_mock._count_c = 1 return {'name': 'segmentC', 'added': ['key7', 'key8', 'key9'], 'removed': [], 'since': -1, 'till': 123} + if segment_name == 'segmentD' and fetch_segment_mock._count_d == 0: + fetch_segment_mock._count_d = 1 + return {'name': 'segmentD', 'added': ['key10'], 'removed': [], + 'since': -1, 'till': 123} return {'added': [], 'removed': [], 'since': 123, 'till': 123} fetch_segment_mock._count_a = 0 fetch_segment_mock._count_b = 0 fetch_segment_mock._count_c = 0 + fetch_segment_mock._count_d = 0 api = mocker.Mock() api.fetch_segment = fetch_segment_mock - segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage) + segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage, rbs_storage) assert await segments_synchronizer.synchronize_segments() - assert (self.segment[0], self.change[0], self.options[0]) == ('segmentA', -1, FetchOptions(True, None, None, None, None)) - assert (self.segment[1], self.change[1], self.options[1]) == ('segmentA', 123, FetchOptions(True, None, None, None, None)) - assert (self.segment[2], self.change[2], self.options[2]) == ('segmentB', -1, FetchOptions(True, None, None, None, None)) - assert (self.segment[3], self.change[3], self.options[3]) == ('segmentB', 123, FetchOptions(True, None, None, None, None)) - assert (self.segment[4], self.change[4], 
self.options[4]) == ('segmentC', -1, FetchOptions(True, None, None, None, None)) - assert (self.segment[5], self.change[5], self.options[5]) == ('segmentC', 123, FetchOptions(True, None, None, None, None)) - - segments_to_validate = set(['segmentA', 'segmentB', 'segmentC']) + api_calls = [] + for i in range(8): + api_calls.append((self.segment[i], self.change[i], self.options[i])) + + assert ('segmentD', -1, FetchOptions(True, None, None, None, None)) in api_calls + assert ('segmentD', 123, FetchOptions(True, None, None, None, None)) in api_calls + assert ('segmentA', -1, FetchOptions(True, None, None, None, None)) in api_calls + assert ('segmentA', 123, FetchOptions(True, None, None, None, None)) in api_calls + assert ('segmentB', -1, FetchOptions(True, None, None, None, None)) in api_calls + assert ('segmentB', 123, FetchOptions(True, None, None, None, None)) in api_calls + assert ('segmentC', -1, FetchOptions(True, None, None, None, None)) in api_calls + assert ('segmentC', 123, FetchOptions(True, None, None, None, None)) in api_calls + + segments_to_validate = set(['segmentA', 'segmentB', 'segmentC', 'segmentD']) for segment in self.segment_put: assert isinstance(segment, Segment) assert segment.name in segments_to_validate @@ -307,6 +359,11 @@ async def test_synchronize_segment(self, mocker): """Test particular segment update.""" split_storage = mocker.Mock(spec=SplitStorage) storage = mocker.Mock(spec=SegmentStorage) + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) + + async def get_segment_names_rbs(): + return [] + rbs_storage.get_segment_names = get_segment_names_rbs async def change_number_mock(segment_name): if change_number_mock._count_a == 0: @@ -340,7 +397,7 @@ async def fetch_segment_mock(segment_name, change_number, fetch_options): api = mocker.Mock() api.fetch_segment = fetch_segment_mock - segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage) + segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, 
storage, rbs_storage) await segments_synchronizer.synchronize_segment('segmentA') assert (self.segment[0], self.change[0], self.options[0]) == ('segmentA', -1, FetchOptions(True, None, None, None, None)) @@ -355,6 +412,11 @@ async def test_synchronize_segment_cdn(self, mocker): split_storage = mocker.Mock(spec=SplitStorage) storage = mocker.Mock(spec=SegmentStorage) + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) + + async def get_segment_names_rbs(): + return [] + rbs_storage.get_segment_names = get_segment_names_rbs async def change_number_mock(segment_name): change_number_mock._count_a += 1 @@ -400,7 +462,7 @@ async def fetch_segment_mock(segment_name, change_number, fetch_options): api = mocker.Mock() api.fetch_segment = fetch_segment_mock - segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage) + segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage, rbs_storage) await segments_synchronizer.synchronize_segment('segmentA') assert (self.segment[0], self.change[0], self.options[0]) == ('segmentA', -1, FetchOptions(True, None, None, None, None)) @@ -415,7 +477,7 @@ async def fetch_segment_mock(segment_name, change_number, fetch_options): @pytest.mark.asyncio async def test_recreate(self, mocker): """Test recreate logic.""" - segments_synchronizer = SegmentSynchronizerAsync(mocker.Mock(), mocker.Mock(), mocker.Mock()) + segments_synchronizer = SegmentSynchronizerAsync(mocker.Mock(), mocker.Mock(), mocker.Mock(), mocker.Mock()) current_pool = segments_synchronizer._worker_pool await segments_synchronizer.shutdown() segments_synchronizer.recreate() diff --git a/tests/sync/test_synchronizer.py b/tests/sync/test_synchronizer.py index 6c850dd5..60ab7993 100644 --- a/tests/sync/test_synchronizer.py +++ b/tests/sync/test_synchronizer.py @@ -106,6 +106,8 @@ def test_sync_all_failed_segments(self, mocker): storage = mocker.Mock() split_storage = mocker.Mock(spec=SplitStorage) 
split_storage.get_segment_names.return_value = ['segmentA'] + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) + rbs_storage.get_segment_names.return_value = [] split_sync = mocker.Mock(spec=SplitSynchronizer) split_sync.synchronize_splits.return_value = None @@ -113,7 +115,7 @@ def run(x, y, c): raise APIException("something broke") api.fetch_segment.side_effect = run - segment_sync = SegmentSynchronizer(api, split_storage, storage) + segment_sync = SegmentSynchronizer(api, split_storage, storage, rbs_storage) split_synchronizers = SplitSynchronizers(split_sync, segment_sync, mocker.Mock(), mocker.Mock(), mocker.Mock()) sychronizer = Synchronizer(split_synchronizers, mocker.Mock(spec=SplitTasks)) @@ -132,7 +134,7 @@ def test_synchronize_splits(self, mocker): segment_api = mocker.Mock() segment_api.fetch_segment.return_value = {'name': 'segmentA', 'added': ['key1', 'key2', 'key3'], 'removed': [], 'since': 123, 'till': 123} - segment_sync = SegmentSynchronizer(segment_api, split_storage, segment_storage) + segment_sync = SegmentSynchronizer(segment_api, split_storage, segment_storage, rbs_storage) split_synchronizers = SplitSynchronizers(split_sync, segment_sync, mocker.Mock(), mocker.Mock(), mocker.Mock()) synchronizer = Synchronizer(split_synchronizers, mocker.Mock(spec=SplitTasks)) @@ -176,6 +178,7 @@ def sync_segments(*_): def test_sync_all(self, mocker): split_storage = mocker.Mock(spec=SplitStorage) rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) + rbs_storage.get_segment_names.return_value = [] split_storage.get_change_number.return_value = 123 split_storage.get_segment_names.return_value = ['segmentA'] class flag_set_filter(): @@ -197,7 +200,7 @@ def intersect(sets): segment_api = mocker.Mock() segment_api.fetch_segment.return_value = {'name': 'segmentA', 'added': ['key1', 'key2', 'key3'], 'removed': [], 'since': 123, 'till': 123} - segment_sync = SegmentSynchronizer(segment_api, split_storage, segment_storage) + segment_sync = 
SegmentSynchronizer(segment_api, split_storage, segment_storage, rbs_storage) split_synchronizers = SplitSynchronizers(split_sync, segment_sync, mocker.Mock(), mocker.Mock(), mocker.Mock()) @@ -469,7 +472,7 @@ async def test_sync_all_failed_segments(self, mocker): api = mocker.Mock() storage = mocker.Mock() split_storage = mocker.Mock(spec=SplitStorage) - split_storage.get_segment_names.return_value = ['segmentA'] + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) split_sync = mocker.Mock(spec=SplitSynchronizer) split_sync.synchronize_splits.return_value = None @@ -481,7 +484,11 @@ async def get_segment_names(): return ['seg'] split_storage.get_segment_names = get_segment_names - segment_sync = SegmentSynchronizerAsync(api, split_storage, storage) + async def get_segment_names_rbs(): + return [] + rbs_storage.get_segment_names = get_segment_names_rbs + + segment_sync = SegmentSynchronizerAsync(api, split_storage, storage, rbs_storage) split_synchronizers = SplitSynchronizers(split_sync, segment_sync, mocker.Mock(), mocker.Mock(), mocker.Mock()) sychronizer = SynchronizerAsync(split_synchronizers, mocker.Mock(spec=SplitTasks)) @@ -515,7 +522,7 @@ async def fetch_segment(segment_name, change, options): 'key3'], 'removed': [], 'since': 123, 'till': 123} segment_api.fetch_segment = fetch_segment - segment_sync = SegmentSynchronizerAsync(segment_api, split_storage, segment_storage) + segment_sync = SegmentSynchronizerAsync(segment_api, split_storage, segment_storage, rbs_storage) split_synchronizers = SplitSynchronizers(split_sync, segment_sync, mocker.Mock(), mocker.Mock(), mocker.Mock()) synchronizer = SynchronizerAsync(split_synchronizers, mocker.Mock(spec=SplitTasks)) @@ -620,7 +627,7 @@ async def fetch_segment(segment_name, change, options): 'removed': [], 'since': 123, 'till': 123} segment_api.fetch_segment = fetch_segment - segment_sync = SegmentSynchronizerAsync(segment_api, split_storage, segment_storage) + segment_sync = 
SegmentSynchronizerAsync(segment_api, split_storage, segment_storage, rbs_storage) split_synchronizers = SplitSynchronizers(split_sync, segment_sync, mocker.Mock(), mocker.Mock(), mocker.Mock()) synchronizer = SynchronizerAsync(split_synchronizers, mocker.Mock(spec=SplitTasks)) diff --git a/tests/tasks/test_segment_sync.py b/tests/tasks/test_segment_sync.py index d5640709..cc701e52 100644 --- a/tests/tasks/test_segment_sync.py +++ b/tests/tasks/test_segment_sync.py @@ -6,7 +6,7 @@ from splitio.api.commons import FetchOptions from splitio.tasks import segment_sync -from splitio.storage import SegmentStorage, SplitStorage +from splitio.storage import SegmentStorage, SplitStorage, RuleBasedSegmentsStorage from splitio.models.splits import Split from splitio.models.segments import Segment from splitio.models.grammar.condition import Condition @@ -21,6 +21,8 @@ def test_normal_operation(self, mocker): """Test the normal operation flow.""" split_storage = mocker.Mock(spec=SplitStorage) split_storage.get_segment_names.return_value = ['segmentA', 'segmentB', 'segmentC'] + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) + rbs_storage.get_segment_names.return_value = [] # Setup a mocked segment storage whose changenumber returns -1 on first fetch and # 123 afterwards. 
@@ -65,7 +67,7 @@ def fetch_segment_mock(segment_name, change_number, fetch_options): fetch_options = FetchOptions(True, None, None, None, None) api.fetch_segment.side_effect = fetch_segment_mock - segments_synchronizer = SegmentSynchronizer(api, split_storage, storage) + segments_synchronizer = SegmentSynchronizer(api, split_storage, storage, rbs_storage) task = segment_sync.SegmentSynchronizationTask(segments_synchronizer.synchronize_segments, 0.5) task.start() @@ -99,6 +101,8 @@ def test_that_errors_dont_stop_task(self, mocker): """Test that if fetching segments fails at some_point, the task will continue running.""" split_storage = mocker.Mock(spec=SplitStorage) split_storage.get_segment_names.return_value = ['segmentA', 'segmentB', 'segmentC'] + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) + rbs_storage.get_segment_names.return_value = [] # Setup a mocked segment storage whose changenumber returns -1 on first fetch and # 123 afterwards. @@ -142,7 +146,7 @@ def fetch_segment_mock(segment_name, change_number, fetch_options): fetch_options = FetchOptions(True, None, None, None, None) api.fetch_segment.side_effect = fetch_segment_mock - segments_synchronizer = SegmentSynchronizer(api, split_storage, storage) + segments_synchronizer = SegmentSynchronizer(api, split_storage, storage, rbs_storage) task = segment_sync.SegmentSynchronizationTask(segments_synchronizer.synchronize_segments, 0.5) task.start() @@ -183,6 +187,11 @@ async def get_segment_names(): return ['segmentA', 'segmentB', 'segmentC'] split_storage.get_segment_names = get_segment_names + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) + async def get_segment_names_rbs(): + return [] + rbs_storage.get_segment_names = get_segment_names_rbs + # Setup a mocked segment storage whose changenumber returns -1 on first fetch and # 123 afterwards. 
storage = mocker.Mock(spec=SegmentStorage) @@ -241,7 +250,7 @@ async def fetch_segment_mock(segment_name, change_number, fetch_options): fetch_options = FetchOptions(True, None, None, None, None) api.fetch_segment = fetch_segment_mock - segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage) + segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage, rbs_storage) task = segment_sync.SegmentSynchronizationTaskAsync(segments_synchronizer.synchronize_segments, 0.5) task.start() @@ -251,12 +260,16 @@ async def fetch_segment_mock(segment_name, change_number, fetch_options): await task.stop() assert not task.is_running() - assert (self.segment_name[0], self.change_number[0], self.fetch_options[0]) == ('segmentA', -1, fetch_options) - assert (self.segment_name[1], self.change_number[1], self.fetch_options[1]) == ('segmentA', 123, fetch_options) - assert (self.segment_name[2], self.change_number[2], self.fetch_options[2]) == ('segmentB', -1, fetch_options) - assert (self.segment_name[3], self.change_number[3], self.fetch_options[3]) == ('segmentB', 123, fetch_options) - assert (self.segment_name[4], self.change_number[4], self.fetch_options[4]) == ('segmentC', -1, fetch_options) - assert (self.segment_name[5], self.change_number[5], self.fetch_options[5]) == ('segmentC', 123, fetch_options) + api_calls = [] + for i in range(6): + api_calls.append((self.segment_name[i], self.change_number[i], self.fetch_options[i])) + + assert ('segmentA', -1, FetchOptions(True, None, None, None, None)) in api_calls + assert ('segmentA', 123, FetchOptions(True, None, None, None, None)) in api_calls + assert ('segmentB', -1, FetchOptions(True, None, None, None, None)) in api_calls + assert ('segmentB', 123, FetchOptions(True, None, None, None, None)) in api_calls + assert ('segmentC', -1, FetchOptions(True, None, None, None, None)) in api_calls + assert ('segmentC', 123, FetchOptions(True, None, None, None, None)) in api_calls segments_to_validate = 
set(['segmentA', 'segmentB', 'segmentC']) for segment in self.segments: @@ -272,6 +285,11 @@ async def get_segment_names(): return ['segmentA', 'segmentB', 'segmentC'] split_storage.get_segment_names = get_segment_names + rbs_storage = mocker.Mock(spec=RuleBasedSegmentsStorage) + async def get_segment_names_rbs(): + return [] + rbs_storage.get_segment_names = get_segment_names_rbs + # Setup a mocked segment storage whose changenumber returns -1 on first fetch and # 123 afterwards. storage = mocker.Mock(spec=SegmentStorage) @@ -329,7 +347,7 @@ async def fetch_segment_mock(segment_name, change_number, fetch_options): fetch_options = FetchOptions(True, None, None, None, None) api.fetch_segment = fetch_segment_mock - segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage) + segments_synchronizer = SegmentSynchronizerAsync(api, split_storage, storage, rbs_storage) task = segment_sync.SegmentSynchronizationTaskAsync(segments_synchronizer.synchronize_segments, 0.5) task.start() @@ -338,12 +356,16 @@ async def fetch_segment_mock(segment_name, change_number, fetch_options): await task.stop() assert not task.is_running() - - assert (self.segment_name[0], self.change_number[0], self.fetch_options[0]) == ('segmentA', -1, fetch_options) - assert (self.segment_name[1], self.change_number[1], self.fetch_options[1]) == ('segmentA', 123, fetch_options) - assert (self.segment_name[2], self.change_number[2], self.fetch_options[2]) == ('segmentB', -1, fetch_options) - assert (self.segment_name[3], self.change_number[3], self.fetch_options[3]) == ('segmentC', -1, fetch_options) - assert (self.segment_name[4], self.change_number[4], self.fetch_options[4]) == ('segmentC', 123, fetch_options) + + api_calls = [] + for i in range(5): + api_calls.append((self.segment_name[i], self.change_number[i], self.fetch_options[i])) + + assert ('segmentA', -1, FetchOptions(True, None, None, None, None)) in api_calls + assert ('segmentA', 123, FetchOptions(True, None, None, None, 
None)) in api_calls + assert ('segmentB', -1, FetchOptions(True, None, None, None, None)) in api_calls + assert ('segmentC', -1, FetchOptions(True, None, None, None, None)) in api_calls + assert ('segmentC', 123, FetchOptions(True, None, None, None, None)) in api_calls segments_to_validate = set(['segmentA', 'segmentB', 'segmentC']) for segment in self.segments: diff --git a/tests/util/test_storage_helper.py b/tests/util/test_storage_helper.py index 1dab0d01..5804a6fa 100644 --- a/tests/util/test_storage_helper.py +++ b/tests/util/test_storage_helper.py @@ -2,7 +2,8 @@ import pytest from splitio.util.storage_helper import update_feature_flag_storage, get_valid_flag_sets, combine_valid_flag_sets, \ - update_rule_based_segment_storage, update_rule_based_segment_storage_async, update_feature_flag_storage_async + update_rule_based_segment_storage, update_rule_based_segment_storage_async, update_feature_flag_storage_async, \ + get_standard_segment_names_in_rbs_storage_async, get_standard_segment_names_in_rbs_storage from splitio.storage.inmemmory import InMemorySplitStorage, InMemoryRuleBasedSegmentStorage, InMemoryRuleBasedSegmentStorageAsync, \ InMemorySplitStorageAsync from splitio.models import splits, rule_based_segments @@ -190,6 +191,17 @@ def clear(): segments = update_rule_based_segment_storage(storage, [self.rbs], 123, True) assert self.clear == 1 + + def test_get_standard_segment_in_rbs_storage(self, mocker): + storage = InMemoryRuleBasedSegmentStorage() + segments = update_rule_based_segment_storage(storage, [self.rbs], 123) + assert get_standard_segment_names_in_rbs_storage(storage) == {'excluded_segment', 'employees'} + + @pytest.mark.asyncio + async def test_get_standard_segment_in_rbs_storage_async(self, mocker): + storage = InMemoryRuleBasedSegmentStorageAsync() + segments = await update_rule_based_segment_storage_async(storage, [self.rbs], 123) + assert await get_standard_segment_names_in_rbs_storage_async(storage) == {'excluded_segment', 'employees'} 
@pytest.mark.asyncio async def test_update_rule_base_segment_storage_async(self, mocker): From ca2e3cb5002325b04add55be6f21e3c18d17702d Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Fri, 16 May 2025 12:42:48 -0700 Subject: [PATCH 33/56] updated split api --- splitio/api/splits.py | 19 ++++++++-- tests/api/test_splits_api.py | 73 +++++++++++++++++++++++++++++++++++- 2 files changed, 88 insertions(+), 4 deletions(-) diff --git a/splitio/api/splits.py b/splitio/api/splits.py index dcbb46f7..619306a1 100644 --- a/splitio/api/splits.py +++ b/splitio/api/splits.py @@ -37,11 +37,20 @@ def __init__(self, client, sdk_key, sdk_metadata, telemetry_runtime_producer): self._spec_version = SPEC_VERSION self._last_proxy_check_timestamp = 0 self.clear_storage = False + self._old_spec_since = None - def _check_last_proxy_check_timestamp(self): + def _check_last_proxy_check_timestamp(self, since): if self._spec_version == _SPEC_1_1 and ((utctime_ms() - self._last_proxy_check_timestamp) >= _PROXY_CHECK_INTERVAL_MILLISECONDS_SS): _LOGGER.info("Switching to new Feature flag spec (%s) and fetching.", SPEC_VERSION); self._spec_version = SPEC_VERSION + self._old_spec_since = since + + def _check_old_spec_since(self, change_number): + if self._spec_version == _SPEC_1_1 and self._old_spec_since is not None: + since = self._old_spec_since + self._old_spec_since = None + return since + return change_number class SplitsAPI(SplitsAPIBase): # pylint: disable=too-few-public-methods @@ -77,7 +86,9 @@ def fetch_splits(self, change_number, rbs_change_number, fetch_options): :rtype: dict """ try: - self._check_last_proxy_check_timestamp() + self._check_last_proxy_check_timestamp(change_number) + change_number = self._check_old_spec_since(change_number) + query, extra_headers = build_fetch(change_number, fetch_options, self._metadata, rbs_change_number) response = self._client.get( 'sdk', @@ -145,7 +156,9 @@ async def fetch_splits(self, change_number, rbs_change_number, fetch_options): 
:rtype: dict """ try: - self._check_last_proxy_check_timestamp() + self._check_last_proxy_check_timestamp(change_number) + change_number = self._check_old_spec_since(change_number) + query, extra_headers = build_fetch(change_number, fetch_options, self._metadata, rbs_change_number) response = await self._client.get( 'sdk', diff --git a/tests/api/test_splits_api.py b/tests/api/test_splits_api.py index bfb45c16..c9aeee8b 100644 --- a/tests/api/test_splits_api.py +++ b/tests/api/test_splits_api.py @@ -122,6 +122,41 @@ def get(sdk, splitChanges, sdk_key, extra_headers, query): assert self.query[2] == {'s': '1.3', 'since': 123, 'rbSince': -1} assert response == {"ff": {"d": [], "s": 123, "t": 456}, "rbs": {"d": [], "s": 123, "t": -1}} assert split_api.clear_storage + + def test_using_old_spec_since(self, mocker): + """Test using old_spec_since variable.""" + httpclient = mocker.Mock(spec=client.HttpClient) + self.counter = 0 + self.query = [] + def get(sdk, splitChanges, sdk_key, extra_headers, query): + self.counter += 1 + self.query.append(query) + if self.counter == 1: + return client.HttpResponse(400, 'error', {}) + if self.counter == 2: + return client.HttpResponse(200, '{"splits": [], "since": 123, "till": 456}', {}) + if self.counter == 3: + return client.HttpResponse(400, 'error', {}) + if self.counter == 4: + return client.HttpResponse(200, '{"splits": [], "since": 456, "till": 456}', {}) + + httpclient.is_sdk_endpoint_overridden.return_value = True + httpclient.get = get + split_api = splits.SplitsAPI(httpclient, 'some_api_key', SdkMetadata('1.0', 'some', '1.2.3.4'), mocker.Mock()) + response = split_api.fetch_splits(123, -1, FetchOptions(False, None, None, None)) + assert response == {"ff": {"d": [], "s": 123, "t": 456}, "rbs": {"d": [], "s": -1, "t": -1}} + assert self.query == [{'s': '1.3', 'since': 123, 'rbSince': -1}, {'s': '1.1', 'since': 123}] + assert not split_api.clear_storage + + time.sleep(1) + splits._PROXY_CHECK_INTERVAL_MILLISECONDS_SS = 10 + + 
response = split_api.fetch_splits(456, -1, FetchOptions(False, None, None, None)) + time.sleep(1) + splits._PROXY_CHECK_INTERVAL_MILLISECONDS_SS = 1000000 + assert self.query[2] == {'s': '1.3', 'since': 456, 'rbSince': -1} + assert self.query[3] == {'s': '1.1', 'since': 456} + assert response == {"ff": {"d": [], "s": 456, "t": 456}, "rbs": {"d": [], "s": -1, "t": -1}} class SplitAPIAsyncTests(object): """Split async API test cases.""" @@ -253,9 +288,45 @@ async def get(sdk, splitChanges, sdk_key, extra_headers, query): assert self.query == [{'s': '1.3', 'since': 123, 'rbSince': -1}, {'s': '1.1', 'since': 123}] assert not split_api.clear_storage - time.sleep(1) + time.sleep(1) splits._PROXY_CHECK_INTERVAL_MILLISECONDS_SS = 10 response = await split_api.fetch_splits(123, -1, FetchOptions(False, None, None, None)) assert self.query[2] == {'s': '1.3', 'since': 123, 'rbSince': -1} assert response == {"ff": {"d": [], "s": 123, "t": 456}, "rbs": {"d": [], "s": 123, "t": -1}} assert split_api.clear_storage + + @pytest.mark.asyncio + async def test_using_old_spec_since(self, mocker): + """Test using old_spec_since variable.""" + httpclient = mocker.Mock(spec=client.HttpClient) + self.counter = 0 + self.query = [] + async def get(sdk, splitChanges, sdk_key, extra_headers, query): + self.counter += 1 + self.query.append(query) + if self.counter == 1: + return client.HttpResponse(400, 'error', {}) + if self.counter == 2: + return client.HttpResponse(200, '{"splits": [], "since": 123, "till": 456}', {}) + if self.counter == 3: + return client.HttpResponse(400, 'error', {}) + if self.counter == 4: + return client.HttpResponse(200, '{"splits": [], "since": 456, "till": 456}', {}) + + httpclient.is_sdk_endpoint_overridden.return_value = True + httpclient.get = get + split_api = splits.SplitsAPIAsync(httpclient, 'some_api_key', SdkMetadata('1.0', 'some', '1.2.3.4'), mocker.Mock()) + response = await split_api.fetch_splits(123, -1, FetchOptions(False, None, None, None)) + assert 
response == {"ff": {"d": [], "s": 123, "t": 456}, "rbs": {"d": [], "s": -1, "t": -1}} + assert self.query == [{'s': '1.3', 'since': 123, 'rbSince': -1}, {'s': '1.1', 'since': 123}] + assert not split_api.clear_storage + + time.sleep(1) + splits._PROXY_CHECK_INTERVAL_MILLISECONDS_SS = 10 + + response = await split_api.fetch_splits(456, -1, FetchOptions(False, None, None, None)) + time.sleep(1) + splits._PROXY_CHECK_INTERVAL_MILLISECONDS_SS = 1000000 + assert self.query[2] == {'s': '1.3', 'since': 456, 'rbSince': -1} + assert self.query[3] == {'s': '1.1', 'since': 456} + assert response == {"ff": {"d": [], "s": 456, "t": 456}, "rbs": {"d": [], "s": -1, "t": -1}} From 338ac8924604b6248973e7c33b25758a1abbbd9c Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Wed, 21 May 2025 09:42:41 -0700 Subject: [PATCH 34/56] Fixed proxy error --- splitio/api/client.py | 10 +++++++++- splitio/api/splits.py | 8 ++++++++ splitio/version.py | 2 +- 3 files changed, 18 insertions(+), 2 deletions(-) diff --git a/splitio/api/client.py b/splitio/api/client.py index 5d3ef6f4..c9032e0e 100644 --- a/splitio/api/client.py +++ b/splitio/api/client.py @@ -207,7 +207,11 @@ def get(self, server, path, sdk_key, query=None, extra_headers=None): # pylint: self._record_telemetry(response.status_code, get_current_epoch_time_ms() - start) return HttpResponse(response.status_code, response.text, response.headers) - except Exception as exc: # pylint: disable=broad-except + except requests.exceptions.ChunkedEncodingError as exc: + _LOGGER.error("IncompleteRead exception detected: %s", exc) + return HttpResponse(400, "", {}) + + except Exception as exc: # pylint: disable=broad-except raise HttpClientException(_EXC_MSG.format(source='request')) from exc def post(self, server, path, sdk_key, body, query=None, extra_headers=None): # pylint: disable=too-many-arguments @@ -300,6 +304,10 @@ async def get(self, server, path, apikey, query=None, extra_headers=None): # py await 
self._record_telemetry(response.status, get_current_epoch_time_ms() - start) return HttpResponse(response.status, body, response.headers) + except aiohttp.ClientPayloadError as exc: + _LOGGER.error("ContentLengthError exception detected: %s", exc) + return HttpResponse(400, "", {}) + except aiohttp.ClientError as exc: # pylint: disable=broad-except raise HttpClientException(_EXC_MSG.format(source='aiohttp')) from exc diff --git a/splitio/api/splits.py b/splitio/api/splits.py index 619306a1..771100fc 100644 --- a/splitio/api/splits.py +++ b/splitio/api/splits.py @@ -89,6 +89,10 @@ def fetch_splits(self, change_number, rbs_change_number, fetch_options): self._check_last_proxy_check_timestamp(change_number) change_number = self._check_old_spec_since(change_number) + if self._spec_version == _SPEC_1_1: + fetch_options = FetchOptions(fetch_options.cache_control_headers, fetch_options.change_number, + None, fetch_options.sets, self._spec_version) + rbs_change_number = None query, extra_headers = build_fetch(change_number, fetch_options, self._metadata, rbs_change_number) response = self._client.get( 'sdk', @@ -158,6 +162,10 @@ async def fetch_splits(self, change_number, rbs_change_number, fetch_options): try: self._check_last_proxy_check_timestamp(change_number) change_number = self._check_old_spec_since(change_number) + if self._spec_version == _SPEC_1_1: + fetch_options = FetchOptions(fetch_options.cache_control_headers, fetch_options.change_number, + None, fetch_options.sets, self._spec_version) + rbs_change_number = None query, extra_headers = build_fetch(change_number, fetch_options, self._metadata, rbs_change_number) response = await self._client.get( diff --git a/splitio/version.py b/splitio/version.py index e8137101..bb552668 100644 --- a/splitio/version.py +++ b/splitio/version.py @@ -1 +1 @@ -__version__ = '10.2.0' \ No newline at end of file +__version__ = '10.3.0-rc2' \ No newline at end of file From 6dcac32d6afee3001ff32578a573d7d3c86b3246 Mon Sep 17 
00:00:00 2001 From: Bilal Al-Shahwany Date: Wed, 21 May 2025 10:27:00 -0700 Subject: [PATCH 35/56] Fixed matcher --- splitio/models/grammar/matchers/rule_based_segment.py | 7 +++++-- tests/engine/files/rule_base_segments2.json | 2 +- tests/engine/test_evaluator.py | 4 ++-- 3 files changed, 8 insertions(+), 5 deletions(-) diff --git a/splitio/models/grammar/matchers/rule_based_segment.py b/splitio/models/grammar/matchers/rule_based_segment.py index 06baf4b2..81777f0d 100644 --- a/splitio/models/grammar/matchers/rule_based_segment.py +++ b/splitio/models/grammar/matchers/rule_based_segment.py @@ -63,9 +63,12 @@ def _match_dep_rb_segments(self, excluded_rb_segments, key, attributes, context) else: excluded_segment = context['ec'].rbs_segments.get(excluded_rb_segment.name) if key in excluded_segment.excluded.get_excluded_keys(): - return False + return True if self._match_dep_rb_segments(excluded_segment.excluded.get_excluded_segments(), key, attributes, context): return True + + if self._match_conditions(excluded_segment.conditions, key, attributes, context): + return True - return self._match_conditions(excluded_segment.conditions, key, attributes, context) + return False diff --git a/tests/engine/files/rule_base_segments2.json b/tests/engine/files/rule_base_segments2.json index ee356fd8..2f77ecd5 100644 --- a/tests/engine/files/rule_base_segments2.json +++ b/tests/engine/files/rule_base_segments2.json @@ -19,7 +19,7 @@ "trafficType": "user", "attribute": "email" }, - "matcherType": "START_WITH", + "matcherType": "STARTS_WITH", "negate": false, "whitelistMatcherData": { "whitelist": [ diff --git a/tests/engine/test_evaluator.py b/tests/engine/test_evaluator.py index a2937126..99f12cd7 100644 --- a/tests/engine/test_evaluator.py +++ b/tests/engine/test_evaluator.py @@ -314,7 +314,7 @@ def test_using_rbs_in_excluded(self): ctx = evaluation_facctory.context_for('bilal', ['some']) assert e.eval_with_context('bilal', 'bilal', 'some', {'email': 'bilal'}, ctx)['treatment'] 
== "on" ctx = evaluation_facctory.context_for('bilal2@split.io', ['some']) - assert e.eval_with_context('bilal2@split.io', 'bilal2@split.io', 'some', {'email': 'bilal2@split.io'}, ctx)['treatment'] == "on" + assert e.eval_with_context('bilal2@split.io', 'bilal2@split.io', 'some', {'email': 'bilal2@split.io'}, ctx)['treatment'] == "off" @pytest.mark.asyncio async def test_evaluate_treatment_with_rbs_in_condition_async(self): @@ -386,7 +386,7 @@ async def test_using_rbs_in_excluded_async(self): ctx = await evaluation_facctory.context_for('bilal', ['some']) assert e.eval_with_context('bilal', 'bilal', 'some', {'email': 'bilal'}, ctx)['treatment'] == "on" ctx = await evaluation_facctory.context_for('bilal2@split.io', ['some']) - assert e.eval_with_context('bilal2@split.io', 'bilal2@split.io', 'some', {'email': 'bilal2@split.io'}, ctx)['treatment'] == "on" + assert e.eval_with_context('bilal2@split.io', 'bilal2@split.io', 'some', {'email': 'bilal2@split.io'}, ctx)['treatment'] == "off" class EvaluationDataFactoryTests(object): """Test evaluation factory class.""" From c09320643d7661c547c9bd0bf622461b3b39b815 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Thu, 29 May 2025 10:08:39 -0700 Subject: [PATCH 36/56] Added models --- splitio/models/splits.py | 50 +++++++++++++++++++++++++++++++++---- tests/models/test_splits.py | 20 +++++++++++++-- 2 files changed, 63 insertions(+), 7 deletions(-) diff --git a/splitio/models/splits.py b/splitio/models/splits.py index 92a277c4..47e69284 100644 --- a/splitio/models/splits.py +++ b/splitio/models/splits.py @@ -10,7 +10,7 @@ SplitView = namedtuple( 'SplitView', - ['name', 'traffic_type', 'killed', 'treatments', 'change_number', 'configs', 'default_treatment', 'sets', 'impressions_disabled'] + ['name', 'traffic_type', 'killed', 'treatments', 'change_number', 'configs', 'default_treatment', 'sets', 'impressions_disabled', 'prerequisites'] ) _DEFAULT_CONDITIONS_TEMPLATE = { @@ -40,7 +40,28 @@ "label": "targeting rule type 
unsupported by sdk" } +class Prerequisites(object): + """Prerequisites.""" + def __init__(self, feature_flag_name, treatments): + self._feature_flag_name = feature_flag_name + self._treatments = treatments + + @property + def feature_flag_name(self): + """Return feature flag name.""" + return self._feature_flag_name + @property + def treatments(self): + """Return treatments.""" + return self._treatments + + def to_json(self): + to_return = [] + for feature_flag_name in self._feature_flag_name: + to_return.append({"n": feature_flag_name, "ts": [treatment for treatment in self._treatments]}) + + return to_return class Status(Enum): """Split status.""" @@ -74,7 +95,8 @@ def __init__( # pylint: disable=too-many-arguments traffic_allocation_seed=None, configurations=None, sets=None, - impressions_disabled=None + impressions_disabled=None, + prerequisites = None ): """ Class constructor. @@ -99,6 +121,8 @@ def __init__( # pylint: disable=too-many-arguments :type sets: list :pram impressions_disabled: track impressions flag :type impressions_disabled: boolean + :param prerequisites: prerequisites + :type prerequisites: List of Prerequisites """ self._name = name self._seed = seed @@ -129,6 +153,7 @@ def __init__( # pylint: disable=too-many-arguments self._configurations = configurations self._sets = set(sets) if sets is not None else set() self._impressions_disabled = impressions_disabled if impressions_disabled is not None else False + self._prerequisites = prerequisites if prerequisites is not None else [] @property def name(self): @@ -194,6 +219,11 @@ def sets(self): def impressions_disabled(self): """Return impressions_disabled of the split.""" return self._impressions_disabled + + @property + def prerequisites(self): + """Return prerequisites of the split.""" + return self._prerequisites def get_configurations_for(self, treatment): """Return the mapping of treatments to configurations.""" @@ -224,7 +254,8 @@ def to_json(self): 'conditions': [c.to_json() for c in
self.conditions], 'configurations': self._configurations, 'sets': list(self._sets), - 'impressionsDisabled': self._impressions_disabled + 'impressionsDisabled': self._impressions_disabled, + 'prerequisites': [prerequisite.to_json() for prerequisite in self._prerequisites] } def to_split_view(self): @@ -243,7 +274,8 @@ def to_split_view(self): self._configurations if self._configurations is not None else {}, self._default_treatment, list(self._sets) if self._sets is not None else [], - self._impressions_disabled + self._impressions_disabled, + self._prerequisites ) def local_kill(self, default_treatment, change_number): @@ -300,5 +332,13 @@ def from_raw(raw_split): traffic_allocation_seed=raw_split.get('trafficAllocationSeed'), configurations=raw_split.get('configurations'), sets=set(raw_split.get('sets')) if raw_split.get('sets') is not None else [], - impressions_disabled=raw_split.get('impressionsDisabled') if raw_split.get('impressionsDisabled') is not None else False + impressions_disabled=raw_split.get('impressionsDisabled') if raw_split.get('impressionsDisabled') is not None else False, + prerequisites=from_raw_prerequisites(raw_split.get('prerequisites')) if raw_split.get('prerequisites') is not None else [] ) + +def from_raw_prerequisites(raw_prerequisites): + to_return = [] + for prerequisite in raw_prerequisites: + to_return.append(Prerequisites(prerequisite['n'], prerequisite['ts'])) + + return to_return \ No newline at end of file diff --git a/tests/models/test_splits.py b/tests/models/test_splits.py index 442a18d0..472ecde9 100644 --- a/tests/models/test_splits.py +++ b/tests/models/test_splits.py @@ -11,6 +11,10 @@ class SplitTests(object): 'changeNumber': 123, 'trafficTypeName': 'user', 'name': 'some_name', + 'prerequisites': [ + { 'n': 'flag1', 'ts': ['on','v1'] }, + { 'n': 'flag2', 'ts': ['off'] } + ], 'trafficAllocation': 100, 'trafficAllocationSeed': 123456, 'seed': 321654, @@ -83,14 +87,26 @@ def test_from_raw(self): assert 
parsed._configurations == {'on': '{"color": "blue", "size": 13}'} assert parsed.sets == {'set1', 'set2'} assert parsed.impressions_disabled == False - + assert len(parsed.prerequisites) == 2 + flag1 = False + flag2 = False + for prerequisite in parsed.prerequisites: + if prerequisite.feature_flag_name == 'flag1': + flag1 = True + assert prerequisite.treatments == ['on','v1'] + if prerequisite.feature_flag_name == 'flag2': + flag2 = True + assert prerequisite.treatments == ['off'] + assert flag1 + assert flag2 + def test_get_segment_names(self, mocker): """Test fetching segment names.""" cond1 = mocker.Mock(spec=Condition) cond2 = mocker.Mock(spec=Condition) cond1.get_segment_names.return_value = ['segment1', 'segment2'] cond2.get_segment_names.return_value = ['segment3', 'segment4'] - split1 = splits.Split( 'some_split', 123, False, 'off', 'user', 'ACTIVE', 123, [cond1, cond2]) + split1 = splits.Split( 'some_split', 123, False, 'off', 'user', 'ACTIVE', 123, [cond1, cond2], None) assert split1.get_segment_names() == ['segment%d' % i for i in range(1, 5)] def test_to_json(self): From 8281decbd64391bea3e6c5c3272fb8caf2a51f2b Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Thu, 29 May 2025 11:36:58 -0700 Subject: [PATCH 37/56] Added matcher --- .../models/grammar/matchers/prerequisites.py | 41 +++ tests/models/grammar/files/splits_prereq.json | 293 ++++++++++++++++++ tests/models/grammar/test_matchers.py | 37 ++- 3 files changed, 370 insertions(+), 1 deletion(-) create mode 100644 splitio/models/grammar/matchers/prerequisites.py create mode 100644 tests/models/grammar/files/splits_prereq.json diff --git a/splitio/models/grammar/matchers/prerequisites.py b/splitio/models/grammar/matchers/prerequisites.py new file mode 100644 index 00000000..d0a62eba --- /dev/null +++ b/splitio/models/grammar/matchers/prerequisites.py @@ -0,0 +1,41 @@ +"""Prerequisites matcher classes.""" + +class PrerequisitesMatcher(object): + + def __init__(self, prerequisites): + """ + Build a 
PrerequisitesMatcher. + + :param prerequisites: prerequisites + :type prerequisites: List of Prerequisites + """ + self._prerequisites = prerequisites + + def match(self, key, attributes=None, context=None): + """ + Evaluate user input against a matcher and return whether the match is successful. + + :param key: User key. + :type key: str. + :param attributes: Custom user attributes. + :type attributes: dict. + :param context: Evaluation context + :type context: dict + + :returns: Whether the match is successful. + :rtype: bool + """ + if self._prerequisites == None: + return True + + if not isinstance(key, str): + return False + + evaluator = context.get('evaluator') + bucketing_key = context.get('bucketing_key') + for prerequisite in self._prerequisites: + result = evaluator.eval_with_context(key, bucketing_key, prerequisite.feature_flag_name, attributes, context['ec']) + if result['treatment'] not in prerequisite.treatments: + return False + + return True \ No newline at end of file diff --git a/tests/models/grammar/files/splits_prereq.json b/tests/models/grammar/files/splits_prereq.json new file mode 100644 index 00000000..5efa7fed --- /dev/null +++ b/tests/models/grammar/files/splits_prereq.json @@ -0,0 +1,293 @@ +{"ff": { + "d": [ + { + "trafficTypeName": "user", + "name": "test_prereq", + "prerequisites": [ + { "n": "feature_segment", "ts": ["off", "def_test"] }, + { "n": "rbs_flag", "ts": ["on"] } + ], + "trafficAllocation": 100, + "trafficAllocationSeed": 1582960494, + "seed": 1842944006, + "status": "ACTIVE", + "killed": false, + "defaultTreatment": "def_treatment", + "changeNumber": 1582741588594, + "algo": 2, + "configurations": {}, + "conditions": [ + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null, +
"unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + }, + { + "treatment": "off", + "size": 0 + } + ], + "label": "default rule" + } + ] + }, + { + "name":"feature_segment", + "trafficTypeId":"u", + "trafficTypeName":"User", + "trafficAllocation": 100, + "trafficAllocationSeed": 1582960494, + "seed":-1177551240, + "status":"ACTIVE", + "killed":false, + "defaultTreatment":"def_test", + "changeNumber": 1582741588594, + "algo": 2, + "configurations": {}, + "conditions":[ + { + "matcherGroup":{ + "combiner":"AND", + "matchers":[ + { + "matcherType":"IN_SEGMENT", + "negate":false, + "userDefinedSegmentMatcherData":{ + "segmentName":"segment-test" + }, + "whitelistMatcherData":null + } + ] + }, + "partitions":[ + { + "treatment":"on", + "size":100 + }, + { + "treatment":"off", + "size":0 + } + ], + "label": "default label" + } + ] + }, + { + "changeNumber": 10, + "trafficTypeName": "user", + "name": "rbs_flag", + "trafficAllocation": 100, + "trafficAllocationSeed": 1828377380, + "seed": -286617921, + "status": "ACTIVE", + "killed": false, + "defaultTreatment": "off", + "algo": 2, + "conditions": [ + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user" + }, + "matcherType": "IN_RULE_BASED_SEGMENT", + "negate": false, + "userDefinedSegmentMatcherData": { + "segmentName": "sample_rule_based_segment" + } + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + }, + { + "treatment": "off", + "size": 0 + } + ], + "label": "in rule based segment sample_rule_based_segment" + }, + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user" + }, + "matcherType": "ALL_KEYS", + "negate": false + } + ] + }, + "partitions": [ + { + "treatment": 
"on", + "size": 0 + }, + { + "treatment": "off", + "size": 100 + } + ], + "label": "default rule" + } + ], + "configurations": {}, + "sets": [], + "impressionsDisabled": false + }, + { + "trafficTypeName": "user", + "name": "prereq_chain", + "prerequisites": [ + { "n": "test_prereq", "ts": ["on"] } + ], + "trafficAllocation": 100, + "trafficAllocationSeed": -2092979940, + "seed": 105482719, + "status": "ACTIVE", + "killed": false, + "defaultTreatment": "on_default", + "changeNumber": 1585948850109, + "algo": 2, + "configurations": {}, + "conditions": [ + { + "conditionType": "WHITELIST", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": null, + "matcherType": "WHITELIST", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": { + "whitelist": [ + "bilal@split.io" + ] + }, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on_whitelist", + "size": 100 + } + ], + "label": "whitelisted" + }, + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": null + }, + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null, + "unaryNumericMatcherData": null, + "betweenMatcherData": null, + "booleanMatcherData": null, + "dependencyMatcherData": null, + "stringMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + }, + { + "treatment": "off", + "size": 0 + }, + { + "treatment": "V1", + "size": 0 + } + ], + "label": "default rule" + } + ] + } + ], + "s": -1, + "t": 1585948850109 +}, "rbs":{"d": [ + { + "changeNumber": 5, + "name": "sample_rule_based_segment", + "status": "ACTIVE", + "trafficTypeName": "user", + "excluded":{ + "keys":["mauro@split.io","gaston@split.io"], + 
"segments":[] + }, + "conditions": [ + { + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "keySelector": { + "trafficType": "user", + "attribute": "email" + }, + "matcherType": "ENDS_WITH", + "negate": false, + "whitelistMatcherData": { + "whitelist": [ + "@split.io" + ] + } + } + ] + } + } + ] + }], "s": -1, "t": 1585948850109} +} diff --git a/tests/models/grammar/test_matchers.py b/tests/models/grammar/test_matchers.py index 680a8cc7..c63aa1c7 100644 --- a/tests/models/grammar/test_matchers.py +++ b/tests/models/grammar/test_matchers.py @@ -11,6 +11,7 @@ from datetime import datetime from splitio.models.grammar import matchers +from splitio.models.grammar.matchers.prerequisites import PrerequisitesMatcher from splitio.models import splits from splitio.models import rule_based_segments from splitio.models.grammar import condition @@ -1136,4 +1137,38 @@ def test_matcher_behaviour(self, mocker): )} assert matcher._match(None, context=ec) is False assert matcher._match('bilal@split.io', context=ec) is False - assert matcher._match('bilal@split.io', {'email': 'bilal@split.io'}, context=ec) is True \ No newline at end of file + assert matcher._match('bilal@split.io', {'email': 'bilal@split.io'}, context=ec) is True + +class PrerequisitesMatcherTests(MatcherTestsBase): + """tests for prerequisites matcher.""" + + def test_init(self, mocker): + """Test init.""" + split_load = os.path.join(os.path.dirname(__file__), 'files', 'splits_prereq.json') + with open(split_load, 'r') as flo: + data = json.loads(flo.read()) + + prereq = splits.from_raw_prerequisites(data['ff']['d'][0]['prerequisites']) + parsed = PrerequisitesMatcher(prereq) + assert parsed._prerequisites == prereq + + def test_matcher_behaviour(self, mocker): + """Test if the matcher works properly.""" + split_load = os.path.join(os.path.dirname(__file__), 'files', 'splits_prereq.json') + with open(split_load, 'r') as flo: + data = json.loads(flo.read()) + prereq = 
splits.from_raw_prerequisites(data['ff']['d'][3]['prerequisites']) + parsed = PrerequisitesMatcher(prereq) + evaluator = mocker.Mock(spec=Evaluator) + + + evaluator.eval_with_context.return_value = {'treatment': 'on'} + assert parsed.match('SPLIT_2', {}, {'evaluator': evaluator, 'ec': [{'flags': ['prereq_chain'], 'segment_memberships': {}}]}) is True + + evaluator.eval_with_context.return_value = {'treatment': 'off'} + assert parsed.match('SPLIT_2', {}, {'evaluator': evaluator, 'ec': [{'flags': ['prereq_chain'], 'segment_memberships': {}}]}) is False + + assert parsed.match([], {}, {'evaluator': evaluator, 'ec': [{'flags': ['prereq_chain'], 'segment_memberships': {}}]}) is False + assert parsed.match({}, {}, {'evaluator': evaluator, 'ec': [{'flags': ['prereq_chain'], 'segment_memberships': {}}]}) is False + assert parsed.match(123, {}, {'evaluator': evaluator, 'ec': [{'flags': ['prereq_chain'], 'segment_memberships': {}}]}) is False + assert parsed.match(object(), {}, {'evaluator': evaluator, 'ec': [{'flags': ['prereq_chain'], 'segment_memberships': {}}]}) is False From 2214cd5f0066617600f731dee1f80cd52344c207 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Fri, 30 May 2025 08:43:45 -0700 Subject: [PATCH 38/56] Updated evaluator --- splitio/engine/evaluator.py | 29 +++++--- splitio/models/impressions.py | 5 ++ tests/engine/test_evaluator.py | 126 ++++++++++++++++++++++++++++++--- 3 files changed, 142 insertions(+), 18 deletions(-) diff --git a/splitio/engine/evaluator.py b/splitio/engine/evaluator.py index d3e05f78..5cbbd205 100644 --- a/splitio/engine/evaluator.py +++ b/splitio/engine/evaluator.py @@ -7,6 +7,7 @@ from splitio.models.grammar.matchers.misc import DependencyMatcher from splitio.models.grammar.matchers.keys import UserDefinedSegmentMatcher from splitio.models.grammar.matchers import RuleBasedSegmentMatcher +from splitio.models.grammar.matchers.prerequisites import PrerequisitesMatcher from splitio.models.rule_based_segments import SegmentType 
from splitio.optional.loaders import asyncio @@ -56,12 +57,22 @@ def eval_with_context(self, key, bucketing, feature_name, attrs, ctx): label = Label.KILLED _treatment = feature.default_treatment else: - treatment, label = self._treatment_for_flag(feature, key, bucketing, attrs, ctx) - if treatment is None: - label = Label.NO_CONDITION_MATCHED - _treatment = feature.default_treatment - else: - _treatment = treatment + if feature.prerequisites is not None: + prerequisites_matcher = PrerequisitesMatcher(feature.prerequisites) + if not prerequisites_matcher.match(key, attrs, { + 'evaluator': self, + 'bucketing_key': bucketing, + 'ec': ctx}): + label = Label.PREREQUISITES_NOT_MET + _treatment = feature.default_treatment + + if _treatment == CONTROL: + treatment, label = self._treatment_for_flag(feature, key, bucketing, attrs, ctx) + if treatment is None: + label = Label.NO_CONDITION_MATCHED + _treatment = feature.default_treatment + else: + _treatment = treatment return { 'treatment': _treatment, @@ -133,7 +144,6 @@ def context_for(self, key, feature_names): rb_segments ) - class AsyncEvaluationDataFactory: def __init__(self, split_storage, segment_storage, rbs_segment_storage): @@ -199,6 +209,7 @@ def get_pending_objects(features, splits, rbsegments, rb_segments, pending_membe pending_rbs = set() for feature in features.values(): cf, cs, crbs = get_dependencies(feature) + cf.extend(get_prerequisites(feature)) pending.update(filter(lambda f: f not in splits, cf)) pending_memberships.update(cs) pending_rbs.update(filter(lambda f: f not in rb_segments, crbs)) @@ -223,4 +234,6 @@ def update_objects(fetched, fetched_rbs, splits, rb_segments): rb_segments.update(rbsegments) return features, rbsegments, splits, rb_segments - \ No newline at end of file + +def get_prerequisites(feature): + return [prerequisite.feature_flag_name for prerequisite in feature.prerequisites] diff --git a/splitio/models/impressions.py b/splitio/models/impressions.py index 9bdfb3a9..9224d15b 100644 
--- a/splitio/models/impressions.py +++ b/splitio/models/impressions.py @@ -60,3 +60,8 @@ class Label(object): # pylint: disable=too-few-public-methods # Treatment: control # Label: not ready NOT_READY = 'not ready' + + # Condition: Prerequisites not met + # Treatment: Default treatment + # Label: prerequisites not met + PREREQUISITES_NOT_MET = "prerequisites not met" diff --git a/tests/engine/test_evaluator.py b/tests/engine/test_evaluator.py index 99f12cd7..390b4ce7 100644 --- a/tests/engine/test_evaluator.py +++ b/tests/engine/test_evaluator.py @@ -5,7 +5,7 @@ import pytest import copy -from splitio.models.splits import Split, Status +from splitio.models.splits import Split, Status, from_raw, Prerequisites from splitio.models import segments from splitio.models.grammar.condition import Condition, ConditionType from splitio.models.impressions import Label @@ -127,6 +127,7 @@ def test_evaluate_treatment_killed_split(self, mocker): mocked_split.killed = True mocked_split.change_number = 123 mocked_split.get_configurations_for.return_value = '{"some_property": 123}' + mocked_split.prerequisites = [] ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), rbs_segments={}) result = e.eval_with_context('some_key', 'some_bucketing_key', 'some', {}, ctx) @@ -146,6 +147,8 @@ def test_evaluate_treatment_ok(self, mocker): mocked_split.killed = False mocked_split.change_number = 123 mocked_split.get_configurations_for.return_value = '{"some_property": 123}' + mocked_split.prerequisites = [] + ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), rbs_segments={}) result = e.eval_with_context('some_key', 'some_bucketing_key', 'some', {}, ctx) assert result['treatment'] == 'on' @@ -165,6 +168,8 @@ def test_evaluate_treatment_ok_no_config(self, mocker): mocked_split.killed = False mocked_split.change_number = 123 mocked_split.get_configurations_for.return_value = None + mocked_split.prerequisites = [] + ctx = 
EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), rbs_segments={}) result = e.eval_with_context('some_key', 'some_bucketing_key', 'some', {}, ctx) assert result['treatment'] == 'on' @@ -184,6 +189,7 @@ def test_evaluate_treatments(self, mocker): mocked_split.killed = False mocked_split.change_number = 123 mocked_split.get_configurations_for.return_value = '{"some_property": 123}' + mocked_split.prerequisites = [] mocked_split2 = mocker.Mock(spec=Split) mocked_split2.name = 'feature4' @@ -191,6 +197,7 @@ def test_evaluate_treatments(self, mocker): mocked_split2.killed = False mocked_split2.change_number = 123 mocked_split2.get_configurations_for.return_value = None + mocked_split2.prerequisites = [] ctx = EvaluationContext(flags={'feature2': mocked_split, 'feature4': mocked_split2}, segment_memberships=set(), rbs_segments={}) results = e.eval_many_with_context('some_key', 'some_bucketing_key', ['feature2', 'feature4'], {}, ctx) @@ -215,6 +222,8 @@ def test_get_gtreatment_for_split_no_condition_matches(self, mocker): mocked_split.change_number = '123' mocked_split.conditions = [] mocked_split.get_configurations_for = None + mocked_split.prerequisites = [] + ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), rbs_segments={}) assert e._treatment_for_flag(mocked_split, 'some_key', 'some_bucketing', {}, ctx) == ( 'off', @@ -232,6 +241,8 @@ def test_get_gtreatment_for_split_non_rollout(self, mocker): mocked_split = mocker.Mock(spec=Split) mocked_split.killed = False mocked_split.conditions = [mocked_condition_1] + mocked_split.prerequisites = [] + treatment, label = e._treatment_for_flag(mocked_split, 'some_key', 'some_bucketing', {}, EvaluationContext(None, None, None)) assert treatment == 'on' assert label == 'some_label' @@ -240,7 +251,7 @@ def test_evaluate_treatment_with_rule_based_segment(self, mocker): """Test that a non-killed split returns the appropriate treatment.""" e = 
evaluator.Evaluator(splitters.Splitter()) - mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False) + mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False, []) ctx = EvaluationContext(flags={'some': mocked_split}, segment_memberships=set(), rbs_segments={'sample_rule_based_segment': rule_based_segments.from_raw(rbs_raw)}) result = e.eval_with_context('bilal@split.io', 'bilal@split.io', 'some', {'email': 'bilal@split.io'}, ctx) @@ -257,7 +268,7 @@ def test_evaluate_treatment_with_rbs_in_condition(self): with open(rbs_segments, 'r') as flo: data = json.loads(flo.read()) - mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False) + mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False, []) rbs = rule_based_segments.from_raw(data["rbs"]["d"][0]) rbs2 = rule_based_segments.from_raw(data["rbs"]["d"][1]) rbs_storage.update([rbs, rbs2], [], 12) @@ -279,7 +290,7 @@ def test_using_segment_in_excluded(self): segment_storage = InMemorySegmentStorage() evaluation_facctory = EvaluationDataFactory(splits_storage, segment_storage, rbs_storage) - mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False) + mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False, []) rbs = rule_based_segments.from_raw(data["rbs"]["d"][0]) rbs_storage.update([rbs], [], 12) splits_storage.update([mocked_split], [], 12) @@ -303,7 +314,7 @@ def test_using_rbs_in_excluded(self): segment_storage = InMemorySegmentStorage() evaluation_facctory = EvaluationDataFactory(splits_storage, segment_storage, rbs_storage) - mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 
12, split_conditions, 1.2, 100, 1234, {}, None, False) + mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False, []) rbs = rule_based_segments.from_raw(data["rbs"]["d"][0]) rbs2 = rule_based_segments.from_raw(data["rbs"]["d"][1]) rbs_storage.update([rbs, rbs2], [], 12) @@ -315,7 +326,52 @@ def test_using_rbs_in_excluded(self): assert e.eval_with_context('bilal', 'bilal', 'some', {'email': 'bilal'}, ctx)['treatment'] == "on" ctx = evaluation_facctory.context_for('bilal2@split.io', ['some']) assert e.eval_with_context('bilal2@split.io', 'bilal2@split.io', 'some', {'email': 'bilal2@split.io'}, ctx)['treatment'] == "off" - + + def test_prerequisites(self): + splits_load = os.path.join(os.path.dirname(__file__), '../models/grammar/files', 'splits_prereq.json') + with open(splits_load, 'r') as flo: + data = json.loads(flo.read()) + e = evaluator.Evaluator(splitters.Splitter()) + splits_storage = InMemorySplitStorage() + rbs_storage = InMemoryRuleBasedSegmentStorage() + segment_storage = InMemorySegmentStorage() + evaluation_facctory = EvaluationDataFactory(splits_storage, segment_storage, rbs_storage) + + rbs = rule_based_segments.from_raw(data["rbs"]["d"][0]) + split1 = from_raw(data["ff"]["d"][0]) + split2 = from_raw(data["ff"]["d"][1]) + split3 = from_raw(data["ff"]["d"][2]) + split4 = from_raw(data["ff"]["d"][3]) + rbs_storage.update([rbs], [], 12) + splits_storage.update([split1, split2, split3, split4], [], 12) + segment = segments.from_raw({'name': 'segment-test', 'added': ['pato@split.io'], 'removed': [], 'till': 123}) + segment_storage.put(segment) + + ctx = evaluation_facctory.context_for('bilal@split.io', ['test_prereq']) + assert e.eval_with_context('bilal@split.io', 'bilal@split.io', 'test_prereq', {'email': 'bilal@split.io'}, ctx)['treatment'] == "on" + assert e.eval_with_context('bilal@split.io', 'bilal@split.io', 'test_prereq', {}, ctx)['treatment'] == "def_treatment" + + ctx = 
evaluation_facctory.context_for('mauro@split.io', ['test_prereq']) + assert e.eval_with_context('mauro@split.io', 'mauro@split.io', 'test_prereq', {'email': 'mauro@split.io'}, ctx)['treatment'] == "def_treatment" + + ctx = evaluation_facctory.context_for('pato@split.io', ['test_prereq']) + assert e.eval_with_context('pato@split.io', 'pato@split.io', 'test_prereq', {'email': 'pato@split.io'}, ctx)['treatment'] == "def_treatment" + + ctx = evaluation_facctory.context_for('nico@split.io', ['test_prereq']) + assert e.eval_with_context('nico@split.io', 'nico@split.io', 'test_prereq', {'email': 'nico@split.io'}, ctx)['treatment'] == "on" + + ctx = evaluation_facctory.context_for('bilal@split.io', ['prereq_chain']) + assert e.eval_with_context('bilal@split.io', 'bilal@split.io', 'prereq_chain', {'email': 'bilal@split.io'}, ctx)['treatment'] == "on_whitelist" + + ctx = evaluation_facctory.context_for('nico@split.io', ['prereq_chain']) + assert e.eval_with_context('nico@split.io', 'nico@split.io', 'test_prereq', {'email': 'nico@split.io'}, ctx)['treatment'] == "on" + + ctx = evaluation_facctory.context_for('pato@split.io', ['prereq_chain']) + assert e.eval_with_context('pato@split.io', 'pato@split.io', 'prereq_chain', {'email': 'pato@split.io'}, ctx)['treatment'] == "on_default" + + ctx = evaluation_facctory.context_for('mauro@split.io', ['prereq_chain']) + assert e.eval_with_context('mauro@split.io', 'mauro@split.io', 'prereq_chain', {'email': 'mauro@split.io'}, ctx)['treatment'] == "on_default" + @pytest.mark.asyncio async def test_evaluate_treatment_with_rbs_in_condition_async(self): e = evaluator.Evaluator(splitters.Splitter()) @@ -388,16 +444,63 @@ async def test_using_rbs_in_excluded_async(self): ctx = await evaluation_facctory.context_for('bilal2@split.io', ['some']) assert e.eval_with_context('bilal2@split.io', 'bilal2@split.io', 'some', {'email': 'bilal2@split.io'}, ctx)['treatment'] == "off" + @pytest.mark.asyncio + async def test_prerequisites(self): + 
splits_load = os.path.join(os.path.dirname(__file__), '../models/grammar/files', 'splits_prereq.json') + with open(splits_load, 'r') as flo: + data = json.loads(flo.read()) + e = evaluator.Evaluator(splitters.Splitter()) + splits_storage = InMemorySplitStorageAsync() + rbs_storage = InMemoryRuleBasedSegmentStorageAsync() + segment_storage = InMemorySegmentStorageAsync() + evaluation_facctory = AsyncEvaluationDataFactory(splits_storage, segment_storage, rbs_storage) + + rbs = rule_based_segments.from_raw(data["rbs"]["d"][0]) + split1 = from_raw(data["ff"]["d"][0]) + split2 = from_raw(data["ff"]["d"][1]) + split3 = from_raw(data["ff"]["d"][2]) + split4 = from_raw(data["ff"]["d"][3]) + await rbs_storage.update([rbs], [], 12) + await splits_storage.update([split1, split2, split3, split4], [], 12) + segment = segments.from_raw({'name': 'segment-test', 'added': ['pato@split.io'], 'removed': [], 'till': 123}) + await segment_storage.put(segment) + + ctx = await evaluation_facctory.context_for('bilal@split.io', ['test_prereq']) + assert e.eval_with_context('bilal@split.io', 'bilal@split.io', 'test_prereq', {'email': 'bilal@split.io'}, ctx)['treatment'] == "on" + assert e.eval_with_context('bilal@split.io', 'bilal@split.io', 'test_prereq', {}, ctx)['treatment'] == "def_treatment" + + ctx = await evaluation_facctory.context_for('mauro@split.io', ['test_prereq']) + assert e.eval_with_context('mauro@split.io', 'mauro@split.io', 'test_prereq', {'email': 'mauro@split.io'}, ctx)['treatment'] == "def_treatment" + + ctx = await evaluation_facctory.context_for('pato@split.io', ['test_prereq']) + assert e.eval_with_context('pato@split.io', 'pato@split.io', 'test_prereq', {'email': 'pato@split.io'}, ctx)['treatment'] == "def_treatment" + + ctx = await evaluation_facctory.context_for('nico@split.io', ['test_prereq']) + assert e.eval_with_context('nico@split.io', 'nico@split.io', 'test_prereq', {'email': 'nico@split.io'}, ctx)['treatment'] == "on" + + ctx = await 
evaluation_facctory.context_for('bilal@split.io', ['prereq_chain']) + assert e.eval_with_context('bilal@split.io', 'bilal@split.io', 'prereq_chain', {'email': 'bilal@split.io'}, ctx)['treatment'] == "on_whitelist" + + ctx = await evaluation_facctory.context_for('nico@split.io', ['prereq_chain']) + assert e.eval_with_context('nico@split.io', 'nico@split.io', 'test_prereq', {'email': 'nico@split.io'}, ctx)['treatment'] == "on" + + ctx = await evaluation_facctory.context_for('pato@split.io', ['prereq_chain']) + assert e.eval_with_context('pato@split.io', 'pato@split.io', 'prereq_chain', {'email': 'pato@split.io'}, ctx)['treatment'] == "on_default" + + ctx = await evaluation_facctory.context_for('mauro@split.io', ['prereq_chain']) + assert e.eval_with_context('mauro@split.io', 'mauro@split.io', 'prereq_chain', {'email': 'mauro@split.io'}, ctx)['treatment'] == "on_default" + class EvaluationDataFactoryTests(object): """Test evaluation factory class.""" def test_get_context(self): """Test context.""" - mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False) + mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False, [Prerequisites('split2', ['on'])]) + split2 = Split('split2', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False, []) flag_storage = InMemorySplitStorage([]) segment_storage = InMemorySegmentStorage() rbs_segment_storage = InMemoryRuleBasedSegmentStorage() - flag_storage.update([mocked_split], [], -1) + flag_storage.update([mocked_split, split2], [], -1) rbs = copy.deepcopy(rbs_raw) rbs['conditions'].append( {"matcherGroup": { @@ -421,6 +524,7 @@ def test_get_context(self): ec = eval_factory.context_for('bilal@split.io', ['some']) assert ec.rbs_segments == {'sample_rule_based_segment': rbs} assert ec.segment_memberships == {"employees": False} + assert 
ec.flags.get("split2").name == "split2" segment_storage.update("employees", {"mauro@split.io"}, {}, 1234) ec = eval_factory.context_for('mauro@split.io', ['some']) @@ -433,11 +537,12 @@ class EvaluationDataFactoryAsyncTests(object): @pytest.mark.asyncio async def test_get_context(self): """Test context.""" - mocked_split = Split('some', 123, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False) + mocked_split = Split('some', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False, [Prerequisites('split2', ['on'])]) + split2 = Split('split2', 12345, False, 'off', 'user', Status.ACTIVE, 12, split_conditions, 1.2, 100, 1234, {}, None, False, []) flag_storage = InMemorySplitStorageAsync([]) segment_storage = InMemorySegmentStorageAsync() rbs_segment_storage = InMemoryRuleBasedSegmentStorageAsync() - await flag_storage.update([mocked_split], [], -1) + await flag_storage.update([mocked_split, split2], [], -1) rbs = copy.deepcopy(rbs_raw) rbs['conditions'].append( {"matcherGroup": { @@ -461,6 +566,7 @@ async def test_get_context(self): ec = await eval_factory.context_for('bilal@split.io', ['some']) assert ec.rbs_segments == {'sample_rule_based_segment': rbs} assert ec.segment_memberships == {"employees": False} + assert ec.flags.get("split2").name == "split2" await segment_storage.update("employees", {"mauro@split.io"}, {}, 1234) ec = await eval_factory.context_for('mauro@split.io', ['some']) From e153509d52c3210280192500d8d3a508ee087db8 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Fri, 30 May 2025 11:32:22 -0700 Subject: [PATCH 39/56] polish --- splitio/models/grammar/matchers/prerequisites.py | 3 --- tests/models/grammar/test_matchers.py | 7 +------ 2 files changed, 1 insertion(+), 9 deletions(-) diff --git a/splitio/models/grammar/matchers/prerequisites.py b/splitio/models/grammar/matchers/prerequisites.py index d0a62eba..799df5c4 100644 --- 
a/splitio/models/grammar/matchers/prerequisites.py +++ b/splitio/models/grammar/matchers/prerequisites.py @@ -28,9 +28,6 @@ def match(self, key, attributes=None, context=None): if self._prerequisites == None: return True - if not isinstance(key, str): - return False - evaluator = context.get('evaluator') bucketing_key = context.get('bucketing_key') for prerequisite in self._prerequisites: diff --git a/tests/models/grammar/test_matchers.py b/tests/models/grammar/test_matchers.py index c63aa1c7..71922431 100644 --- a/tests/models/grammar/test_matchers.py +++ b/tests/models/grammar/test_matchers.py @@ -1166,9 +1166,4 @@ def test_matcher_behaviour(self, mocker): assert parsed.match('SPLIT_2', {}, {'evaluator': evaluator, 'ec': [{'flags': ['prereq_chain'], 'segment_memberships': {}}]}) is True evaluator.eval_with_context.return_value = {'treatment': 'off'} - assert parsed.match('SPLIT_2', {}, {'evaluator': evaluator, 'ec': [{'flags': ['prereq_chain'], 'segment_memberships': {}}]}) is False - - assert parsed.match([], {}, {'evaluator': evaluator, 'ec': [{'flags': ['prereq_chain'], 'segment_memberships': {}}]}) is False - assert parsed.match({}, {}, {'evaluator': evaluator, 'ec': [{'flags': ['prereq_chain'], 'segment_memberships': {}}]}) is False - assert parsed.match(123, {}, {'evaluator': evaluator, 'ec': [{'flags': ['prereq_chain'], 'segment_memberships': {}}]}) is False - assert parsed.match(object(), {}, {'evaluator': evaluator, 'ec': [{'flags': ['prereq_chain'], 'segment_memberships': {}}]}) is False + assert parsed.match('SPLIT_2', {}, {'evaluator': evaluator, 'ec': [{'flags': ['prereq_chain'], 'segment_memberships': {}}]}) is False \ No newline at end of file From b64948dc14cef75893953f7bd41a4ee2da83579d Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Mon, 2 Jun 2025 09:54:48 -0700 Subject: [PATCH 40/56] fixed rbs matcher --- splitio/models/grammar/matchers/rule_based_segment.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/splitio/models/grammar/matchers/rule_based_segment.py b/splitio/models/grammar/matchers/rule_based_segment.py index 81777f0d..6c89c98c 100644 --- a/splitio/models/grammar/matchers/rule_based_segment.py +++ b/splitio/models/grammar/matchers/rule_based_segment.py @@ -63,7 +63,7 @@ def _match_dep_rb_segments(self, excluded_rb_segments, key, attributes, context) else: excluded_segment = context['ec'].rbs_segments.get(excluded_rb_segment.name) if key in excluded_segment.excluded.get_excluded_keys(): - return True + return False if self._match_dep_rb_segments(excluded_segment.excluded.get_excluded_segments(), key, attributes, context): return True From c30a18b50a26a05116205fefcf90c423c992bdcd Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Mon, 2 Jun 2025 10:16:14 -0700 Subject: [PATCH 41/56] fixed tests --- tests/engine/test_evaluator.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/engine/test_evaluator.py b/tests/engine/test_evaluator.py index 99f12cd7..a2937126 100644 --- a/tests/engine/test_evaluator.py +++ b/tests/engine/test_evaluator.py @@ -314,7 +314,7 @@ def test_using_rbs_in_excluded(self): ctx = evaluation_facctory.context_for('bilal', ['some']) assert e.eval_with_context('bilal', 'bilal', 'some', {'email': 'bilal'}, ctx)['treatment'] == "on" ctx = evaluation_facctory.context_for('bilal2@split.io', ['some']) - assert e.eval_with_context('bilal2@split.io', 'bilal2@split.io', 'some', {'email': 'bilal2@split.io'}, ctx)['treatment'] == "off" + assert e.eval_with_context('bilal2@split.io', 'bilal2@split.io', 'some', {'email': 'bilal2@split.io'}, ctx)['treatment'] == "on" @pytest.mark.asyncio async def test_evaluate_treatment_with_rbs_in_condition_async(self): @@ -386,7 +386,7 @@ async def test_using_rbs_in_excluded_async(self): ctx = await evaluation_facctory.context_for('bilal', ['some']) assert e.eval_with_context('bilal', 'bilal', 'some', {'email': 'bilal'}, ctx)['treatment'] == "on" ctx = await 
evaluation_facctory.context_for('bilal2@split.io', ['some']) - assert e.eval_with_context('bilal2@split.io', 'bilal2@split.io', 'some', {'email': 'bilal2@split.io'}, ctx)['treatment'] == "off" + assert e.eval_with_context('bilal2@split.io', 'bilal2@split.io', 'some', {'email': 'bilal2@split.io'}, ctx)['treatment'] == "on" class EvaluationDataFactoryTests(object): """Test evaluation factory class.""" From c174578d839c1f76b38318fd130e3a25773a1899 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Tue, 3 Jun 2025 09:27:23 -0700 Subject: [PATCH 42/56] Updated localhostjson sync --- splitio/sync/split.py | 6 +++++- tests/integration/__init__.py | 2 +- tests/sync/test_splits_synchronizer.py | 4 ++++ 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/splitio/sync/split.py b/splitio/sync/split.py index 1d1722f6..e5d1f645 100644 --- a/splitio/sync/split.py +++ b/splitio/sync/split.py @@ -433,7 +433,8 @@ def _make_feature_flag(feature_flag_name, conditions, configs=None): 'defaultTreatment': 'control', 'algo': 2, 'conditions': conditions, - 'configurations': configs + 'configurations': configs, + 'prerequisites': [] }) @staticmethod @@ -542,6 +543,8 @@ def _sanitize_feature_flag_elements(self, parsed_feature_flags): if 'sets' not in feature_flag: feature_flag['sets'] = [] feature_flag['sets'] = validate_flag_sets(feature_flag['sets'], 'Localhost Validator') + if 'prerequisites' not in feature_flag: + feature_flag['prerequisites'] = [] sanitized_feature_flags.append(feature_flag) return sanitized_feature_flags @@ -560,6 +563,7 @@ def _sanitize_rb_segment_elements(self, parsed_rb_segments): if 'name' not in rb_segment or rb_segment['name'].strip() == '': _LOGGER.warning("A rule based segment in json file does not have (Name) or property is empty, skipping.") continue + for element in [('trafficTypeName', 'user', None, None, None, None), ('status', splits.Status.ACTIVE.value, None, None, [e.value for e in splits.Status], None), ('changeNumber', 0, 0, None, None, 
None)]: diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py index bec5cd6f..845e8c72 100644 --- a/tests/integration/__init__.py +++ b/tests/integration/__init__.py @@ -3,7 +3,7 @@ rbsegments_json = [{"changeNumber": 12, "name": "some_segment", "status": "ACTIVE","trafficTypeName": "user","excluded":{"keys":[],"segments":[]},"conditions": []}] split11 = {"ff": {"t": 1675443569027, "s": -1, "d": [ - {"trafficTypeName": "user", "name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779, "seed": -113875324, "status": "ACTIVE","killed": False, "defaultTreatment": "off", "changeNumber": 1675443569027,"algo": 2, "configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}], "sets": ["set_1"], "impressionsDisabled": False}, + {"trafficTypeName": "user", "name": "SPLIT_2","trafficAllocation": 100,"trafficAllocationSeed": 1057590779, "seed": -113875324, "status": "ACTIVE","killed": False, "defaultTreatment": "off", "changeNumber": 1675443569027,"algo": 2, "configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": 
"default rule"}], "sets": ["set_1"], "impressionsDisabled": False, 'prerequisites': []}, {"trafficTypeName": "user", "name": "SPLIT_1", "trafficAllocation": 100, "trafficAllocationSeed": -1780071202,"seed": -1442762199, "status": "ACTIVE","killed": False, "defaultTreatment": "off", "changeNumber": 1675443537882,"algo": 2, "configurations": {},"conditions": [{"conditionType": "ROLLOUT", "matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 0 },{ "treatment": "off", "size": 100 }],"label": "default rule"}], "sets": ["set_1", "set_2"]}, {"trafficTypeName": "user", "name": "SPLIT_3","trafficAllocation": 100,"trafficAllocationSeed": 1057590779, "seed": -113875324, "status": "ACTIVE","killed": False, "defaultTreatment": "off", "changeNumber": 1675443569027,"algo": 2, "configurations": {},"conditions": [{"conditionType": "ROLLOUT","matcherGroup": {"combiner": "AND","matchers": [{"keySelector": { "trafficType": "user", "attribute": None },"matcherType": "ALL_KEYS","negate": False,"userDefinedSegmentMatcherData": None,"whitelistMatcherData": None,"unaryNumericMatcherData": None,"betweenMatcherData": None,"booleanMatcherData": None,"dependencyMatcherData": None,"stringMatcherData": None}]},"partitions": [{ "treatment": "on", "size": 100 },{ "treatment": "off", "size": 0 }],"label": "default rule"}], "sets": ["set_1"], "impressionsDisabled": True} ]}, "rbs": {"t": -1, "s": -1, "d": rbsegments_json}} diff --git a/tests/sync/test_splits_synchronizer.py b/tests/sync/test_splits_synchronizer.py index c0ea38fb..fd9ac585 100644 --- a/tests/sync/test_splits_synchronizer.py +++ b/tests/sync/test_splits_synchronizer.py @@ -1185,6 +1185,10 
@@ def test_elements_sanitization(self, mocker): split[0]['algo'] = 1 assert (split_synchronizer._sanitize_feature_flag_elements(split)[0]['algo'] == 2) + split = splits_json["splitChange1_1"]['ff']['d'].copy() + del split[0]['prerequisites'] + assert (split_synchronizer._sanitize_feature_flag_elements(split)[0]['prerequisites'] == []) + # test 'status' is set to ACTIVE when None rbs = copy.deepcopy(json_body["rbs"]["d"]) rbs[0]['status'] = None From de2f0138729be130ee1eb71ffceed949438a3206 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Tue, 3 Jun 2025 10:25:52 -0700 Subject: [PATCH 43/56] Updated integration tests --- tests/client/test_input_validator.py | 17 ++++++++ tests/integration/files/splitChanges.json | 48 ++++++++++++++++++++++- tests/integration/test_client_e2e.py | 28 +++++++++++-- 3 files changed, 87 insertions(+), 6 deletions(-) diff --git a/tests/client/test_input_validator.py b/tests/client/test_input_validator.py index 2f15d038..0659ee43 100644 --- a/tests/client/test_input_validator.py +++ b/tests/client/test_input_validator.py @@ -28,6 +28,7 @@ def test_get_treatment(self, mocker): conditions_mock = mocker.PropertyMock() conditions_mock.return_value = [] type(split_mock).conditions = conditions_mock + type(split_mock).prerequisites = [] storage_mock = mocker.Mock(spec=SplitStorage) storage_mock.fetch_many.return_value = {'some_feature': split_mock} rbs_storage = mocker.Mock(spec=InMemoryRuleBasedSegmentStorage) @@ -264,6 +265,7 @@ def test_get_treatment_with_config(self, mocker): conditions_mock = mocker.PropertyMock() conditions_mock.return_value = [] type(split_mock).conditions = conditions_mock + type(split_mock).prerequisites = [] def _configs(treatment): return '{"some": "property"}' if treatment == 'default_treatment' else None @@ -819,6 +821,8 @@ def test_get_treatments(self, mocker): conditions_mock = mocker.PropertyMock() conditions_mock.return_value = [] type(split_mock).conditions = conditions_mock + 
type(split_mock).prerequisites = [] + storage_mock = mocker.Mock(spec=SplitStorage) storage_mock.fetch_many.return_value = { 'some_feature': split_mock @@ -965,6 +969,7 @@ def test_get_treatments_with_config(self, mocker): conditions_mock = mocker.PropertyMock() conditions_mock.return_value = [] type(split_mock).conditions = conditions_mock + type(split_mock).prerequisites = [] storage_mock = mocker.Mock(spec=SplitStorage) storage_mock.fetch_many.return_value = { @@ -1113,6 +1118,7 @@ def test_get_treatments_by_flag_set(self, mocker): conditions_mock = mocker.PropertyMock() conditions_mock.return_value = [] type(split_mock).conditions = conditions_mock + type(split_mock).prerequisites = [] storage_mock = mocker.Mock(spec=InMemorySplitStorage) storage_mock.fetch_many.return_value = { 'some_feature': split_mock @@ -1231,6 +1237,7 @@ def test_get_treatments_by_flag_sets(self, mocker): conditions_mock = mocker.PropertyMock() conditions_mock.return_value = [] type(split_mock).conditions = conditions_mock + type(split_mock).prerequisites = [] storage_mock = mocker.Mock(spec=InMemorySplitStorage) storage_mock.fetch_many.return_value = { 'some_feature': split_mock @@ -1358,6 +1365,7 @@ def _configs(treatment): conditions_mock = mocker.PropertyMock() conditions_mock.return_value = [] type(split_mock).conditions = conditions_mock + type(split_mock).prerequisites = [] storage_mock = mocker.Mock(spec=InMemorySplitStorage) storage_mock.fetch_many.return_value = { 'some_feature': split_mock @@ -1481,6 +1489,7 @@ def _configs(treatment): conditions_mock = mocker.PropertyMock() conditions_mock.return_value = [] type(split_mock).conditions = conditions_mock + type(split_mock).prerequisites = [] storage_mock = mocker.Mock(spec=InMemorySplitStorage) storage_mock.fetch_many.return_value = { 'some_feature': split_mock @@ -1632,6 +1641,7 @@ async def test_get_treatment(self, mocker): conditions_mock = mocker.PropertyMock() conditions_mock.return_value = [] type(split_mock).conditions = 
conditions_mock + type(split_mock).prerequisites = [] storage_mock = mocker.Mock(spec=SplitStorage) async def fetch_many(*_): return { @@ -1889,6 +1899,7 @@ async def test_get_treatment_with_config(self, mocker): conditions_mock = mocker.PropertyMock() conditions_mock.return_value = [] type(split_mock).conditions = conditions_mock + type(split_mock).prerequisites = [] def _configs(treatment): return '{"some": "property"}' if treatment == 'default_treatment' else None @@ -2423,6 +2434,7 @@ async def test_get_treatments(self, mocker): conditions_mock = mocker.PropertyMock() conditions_mock.return_value = [] type(split_mock).conditions = conditions_mock + type(split_mock).prerequisites = [] storage_mock = mocker.Mock(spec=SplitStorage) async def get(*_): return split_mock @@ -2586,6 +2598,7 @@ async def test_get_treatments_with_config(self, mocker): conditions_mock = mocker.PropertyMock() conditions_mock.return_value = [] type(split_mock).conditions = conditions_mock + type(split_mock).prerequisites = [] storage_mock = mocker.Mock(spec=SplitStorage) async def get(*_): @@ -2749,6 +2762,7 @@ async def test_get_treatments_by_flag_set(self, mocker): conditions_mock = mocker.PropertyMock() conditions_mock.return_value = [] type(split_mock).conditions = conditions_mock + type(split_mock).prerequisites = [] storage_mock = mocker.Mock(spec=SplitStorage) async def get(*_): return split_mock @@ -2893,6 +2907,7 @@ async def test_get_treatments_by_flag_sets(self, mocker): conditions_mock = mocker.PropertyMock() conditions_mock.return_value = [] type(split_mock).conditions = conditions_mock + type(split_mock).prerequisites = [] storage_mock = mocker.Mock(spec=SplitStorage) async def get(*_): return split_mock @@ -3048,6 +3063,7 @@ def _configs(treatment): conditions_mock = mocker.PropertyMock() conditions_mock.return_value = [] type(split_mock).conditions = conditions_mock + type(split_mock).prerequisites = [] storage_mock = mocker.Mock(spec=SplitStorage) async def get(*_): return 
split_mock @@ -3195,6 +3211,7 @@ def _configs(treatment): conditions_mock = mocker.PropertyMock() conditions_mock.return_value = [] type(split_mock).conditions = conditions_mock + type(split_mock).prerequisites = [] storage_mock = mocker.Mock(spec=SplitStorage) async def get(*_): return split_mock diff --git a/tests/integration/files/splitChanges.json b/tests/integration/files/splitChanges.json index d9ab1c24..84f7c2cd 100644 --- a/tests/integration/files/splitChanges.json +++ b/tests/integration/files/splitChanges.json @@ -23,7 +23,8 @@ "userDefinedSegmentMatcherData": null, "whitelistMatcherData": { "whitelist": [ - "whitelisted_user" + "whitelisted_user", + "user1234" ] } } @@ -394,7 +395,50 @@ "configurations": {}, "sets": [], "impressionsDisabled": false - } + }, + { + "orgId": null, + "environment": null, + "trafficTypeId": null, + "trafficTypeName": null, + "name": "prereq_feature", + "seed": 1699838640, + "status": "ACTIVE", + "killed": false, + "changeNumber": 123, + "defaultTreatment": "off_default", + "conditions": [ + { + "conditionType": "ROLLOUT", + "matcherGroup": { + "combiner": "AND", + "matchers": [ + { + "matcherType": "ALL_KEYS", + "negate": false, + "userDefinedSegmentMatcherData": null, + "whitelistMatcherData": null + } + ] + }, + "partitions": [ + { + "treatment": "on", + "size": 100 + }, + { + "treatment": "off", + "size": 0 + } + ] + } + ], + "sets": [], + "prerequisites": [ + {"n": "regex_test", "ts": ["on"]}, + {"n": "whitelist_feature", "ts": ["off"]} + ] + } ], "s": -1, "t": 1457726098069 diff --git a/tests/integration/test_client_e2e.py b/tests/integration/test_client_e2e.py index f16352e3..f50869cf 100644 --- a/tests/integration/test_client_e2e.py +++ b/tests/integration/test_client_e2e.py @@ -171,6 +171,16 @@ def _get_treatment(factory, skip_rbs=False): if not isinstance(factory._recorder._impressions_manager._strategy, StrategyNoneMode): _validate_last_impressions(client, ('rbs_feature_flag', 'mauro@split.io', 'off')) + # test 
prerequisites matcher + assert client.get_treatment('abc4', 'prereq_feature') == 'on' + if not isinstance(factory._recorder._impressions_manager._strategy, StrategyNoneMode): + _validate_last_impressions(client, ('prereq_feature', 'abc4', 'on')) + + # test prerequisites matcher + assert client.get_treatment('user1234', 'prereq_feature') == 'off_default' + if not isinstance(factory._recorder._impressions_manager._strategy, StrategyNoneMode): + _validate_last_impressions(client, ('prereq_feature', 'user1234', 'off_default')) + def _get_treatment_with_config(factory): """Test client.get_treatment_with_config().""" try: @@ -460,8 +470,8 @@ def _manager_methods(factory, skip_rbs=False): assert len(manager.splits()) == 7 return - assert len(manager.split_names()) == 8 - assert len(manager.splits()) == 8 + assert len(manager.split_names()) == 9 + assert len(manager.splits()) == 9 class InMemoryDebugIntegrationTests(object): """Inmemory storage-based integration tests.""" @@ -4458,6 +4468,16 @@ async def _get_treatment_async(factory, skip_rbs=False): if skip_rbs: return + + # test prerequisites matcher + assert await client.get_treatment('abc4', 'prereq_feature') == 'on' + if not isinstance(factory._recorder._impressions_manager._strategy, StrategyNoneMode): + await _validate_last_impressions_async(client, ('prereq_feature', 'abc4', 'on')) + + # test prerequisites matcher + assert await client.get_treatment('user1234', 'prereq_feature') == 'off_default' + if not isinstance(factory._recorder._impressions_manager._strategy, StrategyNoneMode): + await _validate_last_impressions_async(client, ('prereq_feature', 'user1234', 'off_default')) # test rule based segment matcher assert await client.get_treatment('bilal@split.io', 'rbs_feature_flag', {'email': 'bilal@split.io'}) == 'on' @@ -4758,5 +4778,5 @@ async def _manager_methods_async(factory, skip_rbs=False): assert len(await manager.splits()) == 7 return - assert len(await manager.split_names()) == 8 - assert len(await 
manager.splits()) == 8 + assert len(await manager.split_names()) == 9 + assert len(await manager.splits()) == 9 From 94f075599daefc031613bb1c987f3b1e526a9a14 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Thu, 5 Jun 2025 11:25:27 -0700 Subject: [PATCH 44/56] updated version and changes --- CHANGES.txt | 4 ++++ splitio/version.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGES.txt b/CHANGES.txt index 52688577..8524a1b5 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,3 +1,7 @@ +10.3.0 (Jun xx, 2025) +- Added support for rule-based segments. These segments determine membership at runtime by evaluating their configured rules against the user attributes provided to the SDK. +- Added support for feature flag prerequisites. This allows customers to define dependency conditions between flags, which are evaluated before any allowlists or targeting rules. + 10.2.0 (Jan 17, 2025) - Added support for the new impressions tracking toggle available on feature flags, both respecting the setting and including the new field being returned on SplitView type objects. Read more in our docs. 
diff --git a/splitio/version.py b/splitio/version.py index bb552668..8d2afd7b 100644 --- a/splitio/version.py +++ b/splitio/version.py @@ -1 +1 @@ -__version__ = '10.3.0-rc2' \ No newline at end of file +__version__ = '10.3.0' \ No newline at end of file From 24c65c127b25907d04d55a04f1e577284fa8500c Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany <41021307+chillaq@users.noreply.github.com> Date: Thu, 5 Jun 2025 12:28:04 -0700 Subject: [PATCH 45/56] Update ci.yml --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index eafd6e2f..0b4efac2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -16,7 +16,7 @@ concurrency: jobs: test: name: Test - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 services: redis: image: redis From f876ebea3286aac416f17d8a2189044ff68cd272 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany <41021307+chillaq@users.noreply.github.com> Date: Thu, 5 Jun 2025 12:30:16 -0700 Subject: [PATCH 46/56] Update ci.yml --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0b4efac2..00920d11 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -16,7 +16,7 @@ concurrency: jobs: test: name: Test - runs-on: ubuntu-24.04 + runs-on: ubuntu-22.04 services: redis: image: redis From 596ebeddcf0572442d9797fdc7bb4bbb6a5edf89 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany <41021307+chillaq@users.noreply.github.com> Date: Thu, 5 Jun 2025 12:44:09 -0700 Subject: [PATCH 47/56] Update ci.yml --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 00920d11..c4fd3244 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -31,7 +31,7 @@ jobs: - name: Setup Python uses: actions/setup-python@v3 with: - python-version: '3.7.16' + 
python-version: '3.8.18' - name: Install dependencies run: | From ff90620ce7d3fcc0db65de2ad9469263970eba20 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany <41021307+chillaq@users.noreply.github.com> Date: Thu, 5 Jun 2025 12:45:04 -0700 Subject: [PATCH 48/56] Update ci.yml --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c4fd3244..d6580f33 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -16,7 +16,7 @@ concurrency: jobs: test: name: Test - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 services: redis: image: redis From ace5a589ede0c9347da4bb070a04c7b5e7047bd0 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany <41021307+chillaq@users.noreply.github.com> Date: Thu, 5 Jun 2025 12:47:59 -0700 Subject: [PATCH 49/56] Update ci.yml --- .github/workflows/ci.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d6580f33..9e8b35c7 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -16,7 +16,7 @@ concurrency: jobs: test: name: Test - runs-on: ubuntu-24.04 + runs-on: ubuntu-22.04 services: redis: image: redis @@ -31,11 +31,10 @@ jobs: - name: Setup Python uses: actions/setup-python@v3 with: - python-version: '3.8.18' + python-version: '3.7.16' - name: Install dependencies run: | - sudo apt-get install -y libkrb5-dev pip install -U setuptools pip wheel pip install -e .[cpphash,redis,uwsgi] From b64f84e7ce9fe7c5a7bad0d0556b06c1972b099d Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany <41021307+chillaq@users.noreply.github.com> Date: Thu, 5 Jun 2025 12:57:32 -0700 Subject: [PATCH 50/56] Update ci.yml --- .github/workflows/ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 9e8b35c7..00920d11 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -35,6 +35,7 @@ jobs: - name: 
Install dependencies run: | + sudo apt-get install -y libkrb5-dev pip install -U setuptools pip wheel pip install -e .[cpphash,redis,uwsgi] From a46281976483904175502d2d004fa9fdbfd481a0 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany <41021307+chillaq@users.noreply.github.com> Date: Thu, 5 Jun 2025 13:04:19 -0700 Subject: [PATCH 51/56] Update ci.yml --- .github/workflows/ci.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 00920d11..52cfda4f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -35,6 +35,7 @@ jobs: - name: Install dependencies run: | + sudo apt update sudo apt-get install -y libkrb5-dev pip install -U setuptools pip wheel pip install -e .[cpphash,redis,uwsgi] From 9349b4797485e1e75e807867a27ffb877b1c73b1 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Thu, 5 Jun 2025 14:39:41 -0700 Subject: [PATCH 52/56] downgrade urllib version for tests --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 5e78817a..1e1928fc 100644 --- a/setup.py +++ b/setup.py @@ -18,7 +18,7 @@ 'aiohttp>=3.8.4', 'aiofiles>=23.1.0', 'requests-kerberos>=0.15.0', - 'urllib3==2.2.0' + 'urllib3==2.0.7' ] INSTALL_REQUIRES = [ From 0611432256cea8553c9cf9dd647e1675dcc58a30 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany <41021307+chillaq@users.noreply.github.com> Date: Mon, 16 Jun 2025 08:43:36 -0700 Subject: [PATCH 53/56] Update CHANGES.txt --- CHANGES.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGES.txt b/CHANGES.txt index 8524a1b5..8c218d00 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,4 +1,4 @@ -10.3.0 (Jun xx, 2025) +10.3.0 (Jun 16, 2025) - Added support for rule-based segments. These segments determine membership at runtime by evaluating their configured rules against the user attributes provided to the SDK. - Added support for feature flag prerequisites. 
This allows customers to define dependency conditions between flags, which are evaluated before any allowlists or targeting rules. From 41ea68e7dc006fd9713ebb18248b1df7d0ba5662 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany <41021307+chillaq@users.noreply.github.com> Date: Mon, 16 Jun 2025 19:12:48 -0700 Subject: [PATCH 54/56] Update CHANGES.txt --- CHANGES.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGES.txt b/CHANGES.txt index 8c218d00..d60d05ef 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -1,4 +1,4 @@ -10.3.0 (Jun 16, 2025) +10.3.0 (Jun 17, 2025) - Added support for rule-based segments. These segments determine membership at runtime by evaluating their configured rules against the user attributes provided to the SDK. - Added support for feature flag prerequisites. This allows customers to define dependency conditions between flags, which are evaluated before any allowlists or targeting rules. From 8dffce590c8d0e27d9265da3657af2d257e95782 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Tue, 17 Jun 2025 10:38:53 -0700 Subject: [PATCH 55/56] polish --- splitio/engine/evaluator.py | 42 +++++++++++------- .../grammar/matchers/rule_based_segment.py | 8 ++-- splitio/push/workers.py | 43 +++++++++++++------ splitio/sync/split.py | 7 ++- 4 files changed, 63 insertions(+), 37 deletions(-) diff --git a/splitio/engine/evaluator.py b/splitio/engine/evaluator.py index 5cbbd205..26875a68 100644 --- a/splitio/engine/evaluator.py +++ b/splitio/engine/evaluator.py @@ -57,23 +57,9 @@ def eval_with_context(self, key, bucketing, feature_name, attrs, ctx): label = Label.KILLED _treatment = feature.default_treatment else: - if feature.prerequisites is not None: - prerequisites_matcher = PrerequisitesMatcher(feature.prerequisites) - if not prerequisites_matcher.match(key, attrs, { - 'evaluator': self, - 'bucketing_key': bucketing, - 'ec': ctx}): - label = Label.PREREQUISITES_NOT_MET - _treatment = feature.default_treatment + label, _treatment = 
self._check_prerequisites(feature, bucketing, key, attrs, ctx, label, _treatment) + label, _treatment = self._get_treatment(feature, bucketing, key, attrs, ctx, label, _treatment) - if _treatment == CONTROL: - treatment, label = self._treatment_for_flag(feature, key, bucketing, attrs, ctx) - if treatment is None: - label = Label.NO_CONDITION_MATCHED - _treatment = feature.default_treatment - else: - _treatment = treatment - return { 'treatment': _treatment, 'configurations': feature.get_configurations_for(_treatment) if feature else None, @@ -84,6 +70,30 @@ def eval_with_context(self, key, bucketing, feature_name, attrs, ctx): 'impressions_disabled': feature.impressions_disabled if feature else None } + def _get_treatment(self, feature, bucketing, key, attrs, ctx, label, _treatment): + if _treatment == CONTROL: + treatment, label = self._treatment_for_flag(feature, key, bucketing, attrs, ctx) + if treatment is None: + label = Label.NO_CONDITION_MATCHED + _treatment = feature.default_treatment + else: + _treatment = treatment + + return label, _treatment + + def _check_prerequisites(self, feature, bucketing, key, attrs, ctx, label, _treatment): + if feature.prerequisites is not None: + prerequisites_matcher = PrerequisitesMatcher(feature.prerequisites) + if not prerequisites_matcher.match(key, attrs, { + 'evaluator': self, + 'bucketing_key': bucketing, + 'ec': ctx}): + label = Label.PREREQUISITES_NOT_MET + _treatment = feature.default_treatment + + return label, _treatment + + def _treatment_for_flag(self, flag, key, bucketing, attributes, ctx): """ ... 
diff --git a/splitio/models/grammar/matchers/rule_based_segment.py b/splitio/models/grammar/matchers/rule_based_segment.py index 6c89c98c..6e4c8023 100644 --- a/splitio/models/grammar/matchers/rule_based_segment.py +++ b/splitio/models/grammar/matchers/rule_based_segment.py @@ -65,10 +65,8 @@ def _match_dep_rb_segments(self, excluded_rb_segments, key, attributes, context) if key in excluded_segment.excluded.get_excluded_keys(): return False - if self._match_dep_rb_segments(excluded_segment.excluded.get_excluded_segments(), key, attributes, context): + if self._match_dep_rb_segments(excluded_segment.excluded.get_excluded_segments(), key, attributes, context) \ + or self._match_conditions(excluded_segment.conditions, key, attributes, context): return True - - if self._match_conditions(excluded_segment.conditions, key, attributes, context): - return True - + return False diff --git a/splitio/push/workers.py b/splitio/push/workers.py index e4888f36..e0dd8369 100644 --- a/splitio/push/workers.py +++ b/splitio/push/workers.py @@ -35,6 +35,8 @@ class CompressionMode(Enum): class WorkerBase(object, metaclass=abc.ABCMeta): """Worker template.""" + _fetching_segment = "Fetching new segment {segment_name}" + @abc.abstractmethod def is_running(self): """Return whether the working is running.""" @@ -226,20 +228,18 @@ def _apply_iff_if_needed(self, event): segment_list = update_feature_flag_storage(self._feature_flag_storage, [new_feature_flag], event.change_number) for segment_name in segment_list: if self._segment_storage.get(segment_name) is None: - _LOGGER.debug('Fetching new segment %s', segment_name) + _LOGGER.debug(self._fetching_segment.format(segment_name=segment_name)) self._segment_handler(segment_name, event.change_number) referenced_rbs = self._get_referenced_rbs(new_feature_flag) - if len(referenced_rbs) > 0 and not self._rule_based_segment_storage.contains(referenced_rbs): - _LOGGER.debug('Fetching new rule based segment(s) %s', referenced_rbs) - 
self._handler(None, event.change_number) + self._fetch_rbs_segment_if_needed(referenced_rbs, event) self._telemetry_runtime_producer.record_update_from_sse(UpdateFromSSE.SPLIT_UPDATE) else: new_rbs = rbs_from_raw(json.loads(self._get_object_definition(event))) segment_list = update_rule_based_segment_storage(self._rule_based_segment_storage, [new_rbs], event.change_number) for segment_name in segment_list: if self._segment_storage.get(segment_name) is None: - _LOGGER.debug('Fetching new segment %s', segment_name) + _LOGGER.debug(self._fetching_segment.format(segment_name=segment_name)) self._segment_handler(segment_name, event.change_number) self._telemetry_runtime_producer.record_update_from_sse(UpdateFromSSE.RBS_UPDATE) return True @@ -247,6 +247,11 @@ def _apply_iff_if_needed(self, event): except Exception as e: raise SplitStorageException(e) + def _fetch_rbs_segment_if_needed(self, referenced_rbs, event): + if len(referenced_rbs) > 0 and not self._rule_based_segment_storage.contains(referenced_rbs): + _LOGGER.debug('Fetching new rule based segment(s) %s', referenced_rbs) + self._handler(None, event.change_number) + def _check_instant_ff_update(self, event): if event.update_type == UpdateType.SPLIT_UPDATE and event.compression is not None and event.previous_change_number == self._feature_flag_storage.get_change_number(): return True @@ -264,16 +269,15 @@ def _run(self): break if event == self._centinel: continue + _LOGGER.debug('Processing feature flag update %d', event.change_number) try: if self._apply_iff_if_needed(event): continue + till = None rbs_till = None - if event.update_type == UpdateType.SPLIT_UPDATE: - till = event.change_number - else: - rbs_till = event.change_number + till, rbs_till = self._check_update_type(till, rbs_till, event) sync_result = self._handler(till, rbs_till) if not sync_result.success and sync_result.error_code is not None and sync_result.error_code == 414: _LOGGER.error("URI too long exception caught, sync failed") @@ -288,6 
+292,14 @@ def _run(self): _LOGGER.error('Exception raised in feature flag synchronization') _LOGGER.debug('Exception information: ', exc_info=True) + def _check_update_type(self, till, rbs_till, event): + if event.update_type == UpdateType.SPLIT_UPDATE: + till = event.change_number + else: + rbs_till = event.change_number + + return till, rbs_till + def start(self): """Start worker.""" if self.is_running(): @@ -354,20 +366,18 @@ async def _apply_iff_if_needed(self, event): segment_list = await update_feature_flag_storage_async(self._feature_flag_storage, [new_feature_flag], event.change_number) for segment_name in segment_list: if await self._segment_storage.get(segment_name) is None: - _LOGGER.debug('Fetching new segment %s', segment_name) + _LOGGER.debug(self._fetching_segment.format(segment_name=segment_name)) await self._segment_handler(segment_name, event.change_number) referenced_rbs = self._get_referenced_rbs(new_feature_flag) - if len(referenced_rbs) > 0 and not await self._rule_based_segment_storage.contains(referenced_rbs): - await self._handler(None, event.change_number) - + await self._fetch_rbs_segment_if_needed(referenced_rbs, event) await self._telemetry_runtime_producer.record_update_from_sse(UpdateFromSSE.SPLIT_UPDATE) else: new_rbs = rbs_from_raw(json.loads(self._get_object_definition(event))) segment_list = await update_rule_based_segment_storage_async(self._rule_based_segment_storage, [new_rbs], event.change_number) for segment_name in segment_list: if await self._segment_storage.get(segment_name) is None: - _LOGGER.debug('Fetching new segment %s', segment_name) + _LOGGER.debug(self._fetching_segment.format(segment_name=segment_name)) await self._segment_handler(segment_name, event.change_number) await self._telemetry_runtime_producer.record_update_from_sse(UpdateFromSSE.RBS_UPDATE) return True @@ -375,6 +385,11 @@ async def _apply_iff_if_needed(self, event): except Exception as e: raise SplitStorageException(e) + async def 
_fetch_rbs_segment_if_needed(self, referenced_rbs, event): + if len(referenced_rbs) > 0 and not await self._rule_based_segment_storage.contains(referenced_rbs): + _LOGGER.debug('Fetching new rule based segment(s) %s', referenced_rbs) + await self._handler(None, event.change_number) + async def _check_instant_ff_update(self, event): if event.update_type == UpdateType.SPLIT_UPDATE and event.compression is not None and event.previous_change_number == await self._feature_flag_storage.get_change_number(): return True diff --git a/splitio/sync/split.py b/splitio/sync/split.py index e5d1f645..1735931a 100644 --- a/splitio/sync/split.py +++ b/splitio/sync/split.py @@ -75,6 +75,9 @@ def _get_config_sets(self): return ','.join(self._feature_flag_storage.flag_set_filter.sorted_flag_sets) + def _check_exit_conditions(self, till, rbs_till, change_number, rbs_change_number): + return (till is not None and till < change_number) or (rbs_till is not None and rbs_till < rbs_change_number) + class SplitSynchronizer(SplitSynchronizerBase): """Feature Flag changes synchronizer.""" @@ -119,7 +122,7 @@ def _fetch_until(self, fetch_options, till=None, rbs_till=None): if rbs_change_number is None: rbs_change_number = -1 - if (till is not None and till < change_number) or (rbs_till is not None and rbs_till < rbs_change_number): + if self._check_exit_conditions(till, rbs_till, change_number, rbs_change_number): # the passed till is less than change_number, no need to perform updates return change_number, rbs_change_number, segment_list @@ -278,7 +281,7 @@ async def _fetch_until(self, fetch_options, till=None, rbs_till=None): if rbs_change_number is None: rbs_change_number = -1 - if (till is not None and till < change_number) or (rbs_till is not None and rbs_till < rbs_change_number): + if self._check_exit_conditions(till, rbs_till, change_number, rbs_change_number): # the passed till is less than change_number, no need to perform updates return change_number, rbs_change_number, segment_list 
From 8143774d21d526d2daac0fd67c3852ac5edbb869 Mon Sep 17 00:00:00 2001 From: Bilal Al-Shahwany Date: Tue, 17 Jun 2025 11:08:05 -0700 Subject: [PATCH 56/56] polish --- splitio/sync/split.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/splitio/sync/split.py b/splitio/sync/split.py index 1735931a..c1b5aa39 100644 --- a/splitio/sync/split.py +++ b/splitio/sync/split.py @@ -78,6 +78,9 @@ def _get_config_sets(self): def _check_exit_conditions(self, till, rbs_till, change_number, rbs_change_number): return (till is not None and till < change_number) or (rbs_till is not None and rbs_till < rbs_change_number) + def _check_return_conditions(self, feature_flag_changes): + return feature_flag_changes.get('ff')['t'] == feature_flag_changes.get('ff')['s'] and feature_flag_changes.get('rbs')['t'] == feature_flag_changes.get('rbs')['s'] + class SplitSynchronizer(SplitSynchronizerBase): """Feature Flag changes synchronizer.""" @@ -145,7 +148,7 @@ def _fetch_until(self, fetch_options, till=None, rbs_till=None): segment_list.update(update_feature_flag_storage(self._feature_flag_storage, fetched_feature_flags, feature_flag_changes.get('ff')['t'], self._api.clear_storage)) segment_list.update(rbs_segment_list) - if feature_flag_changes.get('ff')['t'] == feature_flag_changes.get('ff')['s'] and feature_flag_changes.get('rbs')['t'] == feature_flag_changes.get('rbs')['s']: + if self._check_return_conditions(feature_flag_changes): return feature_flag_changes.get('ff')['t'], feature_flag_changes.get('rbs')['t'], segment_list def _attempt_feature_flag_sync(self, fetch_options, till=None, rbs_till=None): @@ -304,7 +307,7 @@ async def _fetch_until(self, fetch_options, till=None, rbs_till=None): segment_list = await update_feature_flag_storage_async(self._feature_flag_storage, fetched_feature_flags, feature_flag_changes.get('ff')['t'], self._api.clear_storage) segment_list.update(rbs_segment_list) - if feature_flag_changes.get('ff')['t'] == 
feature_flag_changes.get('ff')['s'] and feature_flag_changes.get('rbs')['t'] == feature_flag_changes.get('rbs')['s']: + if self._check_return_conditions(feature_flag_changes): return feature_flag_changes.get('ff')['t'], feature_flag_changes.get('rbs')['t'], segment_list async def _attempt_feature_flag_sync(self, fetch_options, till=None, rbs_till=None):