diff --git a/MANIFEST.in b/MANIFEST.in index 2bd71dcc..4ec6f0b0 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,6 +1,5 @@ include requirements.txt include README.txt include test-requirements.txt -include twisted-requirements.txt include redis-requirements.txt include python2.6-requirements.txt \ No newline at end of file diff --git a/README.md b/README.md index 8074e1ea..aed91f43 100644 --- a/README.md +++ b/README.md @@ -40,40 +40,11 @@ Your first feature flag Python 2.6 ---------- -Python 2.6 is supported for polling mode only and requires an extra dependency. Here's how to set it up: +Python 2.6 requires an extra dependency. Here's how to set it up: 1. Use the `python2.6` extra in your requirements.txt: `ldclient-py[python2.6]` -1. Due to Python 2.6's lack of SNI support, LaunchDarkly's streaming flag updates are not available. Set the `stream=False` option in the client config to disable it. You'll still receive flag updates, but via a polling mechanism with efficient caching. Here's an example: - `config = ldclient.Config(stream=False, sdk_key="SDK_KEY")` - - -Twisted -------- -Twisted is supported for LDD mode only. To run in Twisted/LDD mode, - -1. Use this dependency: - - ``` - ldclient-py[twisted]>=3.0.1 - ``` -2. Configure the client: - - ``` - feature_store = TwistedRedisFeatureStore(url='YOUR_REDIS_URL', redis_prefix="ldd-restwrapper", expiration=0) - ldclient.config.feature_store = feature_store - - ldclient.config = ldclient.Config( - use_ldd=use_ldd, - event_consumer_class=TwistedEventConsumer, - ) - ldclient.sdk_key = 'YOUR_SDK_KEY' - ``` -3. Get the client: - - ```client = ldclient.get()``` - Learn more ----------- @@ -104,7 +75,6 @@ About LaunchDarkly * [JavaScript](http://docs.launchdarkly.com/docs/js-sdk-reference "LaunchDarkly JavaScript SDK") * [PHP](http://docs.launchdarkly.com/docs/php-sdk-reference "LaunchDarkly PHP SDK") * [Python](http://docs.launchdarkly.com/docs/python-sdk-reference "LaunchDarkly Python SDK") - * [Python Twisted](http://docs.launchdarkly.com/docs/python-twisted-sdk-reference "LaunchDarkly Python Twisted SDK") * [Go](http://docs.launchdarkly.com/docs/go-sdk-reference "LaunchDarkly Go SDK") * [Node.JS](http://docs.launchdarkly.com/docs/node-sdk-reference "LaunchDarkly Node SDK") * [.NET](http://docs.launchdarkly.com/docs/dotnet-sdk-reference "LaunchDarkly .Net SDK") diff --git a/ldclient/client.py b/ldclient/client.py index bb293297..14a87e04 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -14,6 +14,7 @@ from ldclient.polling import PollingUpdateProcessor from ldclient.streaming import StreamingUpdateProcessor from ldclient.util import check_uwsgi, log +from ldclient.versioned_data_kind import FEATURES, SEGMENTS # noinspection PyBroadException try: @@ -184,7 +185,7 @@ def cb(flag): return default - return self._store.get(key, cb) + return self._store.get(FEATURES, key, cb) def _evaluate(self, flag, user): return evaluate(flag, user, self._store) @@ -223,7 +224,7 @@ def cb(all_flags): log.error("Exception caught in all_flags: " + e.message + " for user: " + str(user)) return {} - return self._store.all(cb) + return self._store.all(FEATURES, cb) def _evaluate_multi(self, user, flags): return dict([(k, self._evaluate(v, user)[0]) for k, v in flags.items() or {}]) diff --git a/ldclient/config.py b/ldclient/config.py index 1ec6a82a..8abd96a8 100644 --- a/ldclient/config.py +++ b/ldclient/config.py @@ -135,6 +135,10 @@ def get_default(self, key, default): def sdk_key(self): return self.__sdk_key + @property + def base_uri(self): + return 
self.__base_uri + @property def get_latest_flags_uri(self): return self.__base_uri + GET_LATEST_FEATURES_PATH @@ -143,6 +147,10 @@ def get_latest_flags_uri(self): def events_uri(self): return self.__events_uri + '/bulk' + @property + def stream_base_uri(self): return self.__stream_uri + @property def stream_uri(self): return self.__stream_uri + STREAM_FLAGS_PATH diff --git a/ldclient/feature_requester.py b/ldclient/feature_requester.py index 6b71f99d..c29d4d79 100644 --- a/ldclient/feature_requester.py +++ b/ldclient/feature_requester.py @@ -6,6 +6,10 @@ from ldclient.interfaces import FeatureRequester from ldclient.util import _headers from ldclient.util import log +from ldclient.versioned_data_kind import FEATURES, SEGMENTS + + +LATEST_ALL_URI = '/sdk/latest-all' class FeatureRequesterImpl(FeatureRequester): @@ -14,32 +18,35 @@ def __init__(self, config): self._session_no_cache = requests.Session() self._config = config - def get_all(self): + def get_all_data(self): hdrs = _headers(self._config.sdk_key) - uri = self._config.get_latest_flags_uri + uri = self._config.base_uri + LATEST_ALL_URI r = self._session_cache.get(uri, headers=hdrs, timeout=( self._config.connect_timeout, self._config.read_timeout)) r.raise_for_status() - flags = r.json() - versions_summary = list(map(lambda f: "{0}:{1}".format(f.get("key"), f.get("version")), flags.values())) - log.debug("Get All flags response status:[{0}] From cache?[{1}] ETag:[{2}] flag versions: {3}" .format(r.status_code, r.from_cache, r.headers.get('ETag'), versions_summary)) - return flags + all_data = r.json() + log.debug("Get All flags response status:[%d] From cache?[%s] ETag:[%s]", r.status_code, r.from_cache, r.headers.get('ETag')) + return { + FEATURES: all_data['flags'], + SEGMENTS: all_data['segments'] + } - def get_one(self, key): + def get_one(self, kind, key): hdrs = _headers(self._config.sdk_key) - uri = self._config.get_latest_flags_uri + '/' + key - log.debug("Getting one feature flag using uri: " + uri) + path = kind.request_api_path + '/' + key + uri = self._config.base_uri + path + log.debug("Getting %s from %s using uri: %s", key, kind.namespace, uri) r = self._session_no_cache.get(uri, headers=hdrs, timeout=( self._config.connect_timeout, self._config.read_timeout)) r.raise_for_status() - flag = r.json() - log.debug("Get one flag response status:[{0}] Flag key:[{1}] version:[{2}]" .format(r.status_code, key, flag.get("version"))) - return flag + obj = r.json() + log.debug("%s response status:[%d] key:[%s] version:[%d]", path, r.status_code, key, obj.get("version")) + return obj
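Reviewer note: `get_all_data()` replaces `get_all()` and returns one payload keyed by data kind instead of a bare flag dict. A minimal sketch of how a caller consumes it; the `refresh` helper and the item bodies are illustrative, not part of this patch:

```python
# Sketch only: the two-level { kind: { key: item } } shape that get_all_data()
# returns and that the stores now expect. The requester/store arguments stand
# in for FeatureRequesterImpl and a FeatureStore implementation.
from ldclient.versioned_data_kind import FEATURES, SEGMENTS

def refresh(requester, store):
    all_data = requester.get_all_data()   # one GET against /sdk/latest-all
    flags = all_data[FEATURES]            # e.g. {'my-flag': {'key': 'my-flag', 'version': 3}}
    segments = all_data[SEGMENTS]         # e.g. {'beta': {'key': 'beta', 'version': 1}}
    store.init(all_data)                  # init() takes the same two-level dict
```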
+ """ def __init__(self): self._lock = ReadWriteLock() self._initialized = False - self._features = {} + self._items = defaultdict(dict) - def get(self, key, callback): + def get(self, kind, key, callback): try: self._lock.rlock() - f = self._features.get(key) - if f is None: - log.debug("Attempted to get missing feature: " + str(key) + " Returning None") + itemsOfKind = self._items[kind] + item = itemsOfKind.get(key) + if item is None: + log.debug("Attempted to get missing key %s in '%s', returning None", key, kind.namespace) return callback(None) - if 'deleted' in f and f['deleted']: - log.debug("Attempted to get deleted feature: " + str(key) + " Returning None") + if 'deleted' in item and item['deleted']: + log.debug("Attempted to get deleted key %s in '%s', returning None", key, kind.namespace) return callback(None) - return callback(f) + return callback(item) finally: self._lock.runlock() - def all(self, callback): + def all(self, kind, callback): try: self._lock.rlock() - return callback(dict((k, f) for k, f in self._features.items() if ('deleted' not in f) or not f['deleted'])) + itemsOfKind = self._items[kind] + return callback(dict((k, i) for k, i in itemsOfKind.items() if ('deleted' not in i) or not i['deleted'])) finally: self._lock.runlock() - def init(self, features): + def init(self, all_data): try: - self._lock.lock() - self._features = dict(features) + self._lock.rlock() + self._items.clear() + self._items.update(all_data) self._initialized = True - log.debug("Initialized feature store with " + str(len(features)) + " features") + for k in all_data: + log.debug("Initialized '%s' store with %d items", k.namespace, len(all_data[k])) finally: - self._lock.unlock() + self._lock.runlock() # noinspection PyShadowingNames - def delete(self, key, version): + def delete(self, kind, key, version): try: - self._lock.lock() - f = self._features.get(key) - if f is not None and f['version'] < version: - f['deleted'] = True - f['version'] = version - elif f is None: - f = {'deleted': True, 'version': version} - self._features[key] = f + self._lock.rlock() + itemsOfKind = self._items[kind] + i = itemsOfKind.get(key) + if i is None or i['version'] < version: + i = {'deleted': True, 'version': version} + itemsOfKind[key] = i finally: - self._lock.unlock() + self._lock.runlock() - def upsert(self, key, feature): + def upsert(self, kind, item): + key = item['key'] try: - self._lock.lock() - f = self._features.get(key) - if f is None or f['version'] < feature['version']: - self._features[key] = feature - log.debug("Updated feature {0} to version {1}".format(key, feature['version'])) + self._lock.rlock() + itemsOfKind = self._items[kind] + i = itemsOfKind.get(key) + if i is None or i['version'] < item['version']: + itemsOfKind[key] = item + log.debug("Updated %s in '%s' to version %d", key, kind.namespace, item['version']) finally: - self._lock.unlock() + self._lock.runlock() @property def initialized(self): diff --git a/ldclient/flag.py b/ldclient/flag.py index 5afbd810..06787de9 100644 --- a/ldclient/flag.py +++ b/ldclient/flag.py @@ -5,6 +5,7 @@ import sys from ldclient import operators +from ldclient.versioned_data_kind import FEATURES, SEGMENTS __LONG_SCALE__ = float(0xFFFFFFFFFFFFFFF) @@ -29,7 +30,7 @@ def _evaluate(flag, user, store, prereq_events=None): failed_prereq = None prereq_value = None for prereq in flag.get('prerequisites') or []: - prereq_flag = store.get(prereq.get('key'), lambda x: x) + prereq_flag = store.get(FEATURES, prereq.get('key'), lambda x: x) if prereq_flag is None: 
log.warn("Missing prereq flag: " + prereq.get('key')) failed_prereq = prereq @@ -49,11 +50,11 @@ def _evaluate(flag, user, store, prereq_events=None): if failed_prereq is not None: return None, events - index = _evaluate_index(flag, user) + index = _evaluate_index(flag, user, store) return _get_variation(flag, index), events -def _evaluate_index(feature, user): +def _evaluate_index(feature, user, store): # Check to see if any user targets match: for target in feature.get('targets') or []: for value in target.get('values') or []: @@ -62,7 +63,7 @@ def _evaluate_index(feature, user): # Now walk through the rules to see if any match for rule in feature.get('rules') or []: - if _rule_matches_user(rule, user): + if _rule_matches_user(rule, user, store): return _variation_index_for_user(feature, rule, user) # Walk through fallthrough and see if it matches @@ -103,7 +104,7 @@ def _variation_index_for_user(feature, rule, user): bucket_by = 'key' if rule['rollout'].get('bucketBy') is not None: bucket_by = rule['rollout']['bucketBy'] - bucket = _bucket_user(user, feature, bucket_by) + bucket = _bucket_user(user, feature['key'], feature['salt'], bucket_by) sum = 0.0 for wv in rule['rollout'].get('variations') or []: sum += wv.get('weight', 0.0) / 100000.0 @@ -113,7 +114,7 @@ def _variation_index_for_user(feature, rule, user): return None -def _bucket_user(user, feature, bucket_by): +def _bucket_user(user, key, salt, bucket_by): u_value, should_pass = _get_user_attribute(user, bucket_by) bucket_by_value = _bucketable_string_value(u_value) @@ -123,7 +124,7 @@ def _bucket_user(user, feature, bucket_by): id_hash = u_value if user.get('secondary') is not None: id_hash = id_hash + '.' + user['secondary'] - hash_key = '%s.%s.%s' % (feature['key'], feature['salt'], id_hash) + hash_key = '%s.%s.%s' % (key, salt, id_hash) hash_val = int(hashlib.sha1(hash_key.encode('utf-8')).hexdigest()[:15], 16) result = hash_val / __LONG_SCALE__ return result @@ -137,15 +138,25 @@ def _bucketable_string_value(u_value): return None -def _rule_matches_user(rule, user): +def _rule_matches_user(rule, user, store): for clause in rule.get('clauses') or []: if clause.get('attribute') is not None: - if not _clause_matches_user(clause, user): + if not _clause_matches_user(clause, user, store): return False return True -def _clause_matches_user(clause, user): +def _clause_matches_user(clause, user, store): + if clause.get('op') == 'segmentMatch': + for seg_key in clause.get('values') or []: + segment = store.get(SEGMENTS, seg_key, lambda x: x) + if segment is not None and _segment_matches_user(segment, user): + return _maybe_negate(clause, True) + return _maybe_negate(clause, False) + else: + return _clause_matches_user_no_segments(clause, user) + +def _clause_matches_user_no_segments(clause, user): u_value, should_pass = _get_user_attribute(user, clause.get('attribute')) if should_pass is True: return False @@ -161,6 +172,33 @@ def _clause_matches_user(clause, user): else: return _maybe_negate(clause, _match_any(op_fn, u_value, clause.get('values') or [])) +def _segment_matches_user(segment, user): + key = user.get('key') + if key is not None: + if key in segment.get('included', []): + return True + if key in segment.get('excluded', []): + return False + for rule in segment.get('rules', []): + if _segment_rule_matches_user(rule, user, segment.get('key'), segment.get('salt')): + return True + return False + +def _segment_rule_matches_user(rule, user, segment_key, salt): + for clause in rule.get('clauses') or []: + if not 
diff --git a/ldclient/interfaces.py b/ldclient/interfaces.py index 80ae7a8c..af1caa86 100644 --- a/ldclient/interfaces.py +++ b/ldclient/interfaces.py @@ -3,18 +3,20 @@ class FeatureStore(object): """ - Stores and retrieves the state of feature flags + Stores and retrieves the state of feature flags and related data """ __metaclass__ = ABCMeta @abstractmethod - def get(self, key, callback): + def get(self, kind, key, callback): """ Gets a feature and calls the callback with the feature data to return the result - :param key: The feature key + :param kind: Denotes which collection to access - one of the constants in versioned_data_kind + :type kind: VersionedDataKind + :param key: The key of the object :type key: str - :param callback: The function that accepts the feature data and returns the feature value - :type callback: Function that processes the feature flag once received. + :param callback: The function that accepts the retrieved data and returns a transformed value + :type callback: Function that processes the retrieved object once received. :return: The result of executing callback. """ @@ -22,39 +24,43 @@ def get(self, key, callback): - def all(self, callback): + def all(self, kind, callback): """ - Returns all feature flags and their data - :param callback: The function that accepts the feature data and returns the feature value - :type callback: Function that processes the feature flags once received. + Returns all the objects of a particular kind and their data + :param kind: Denotes which collection to access - one of the constants in versioned_data_kind + :type kind: VersionedDataKind + :param callback: The function that accepts the retrieved data and returns a transformed value + :type callback: Function that processes the retrieved objects once received. :rtype: The result of executing callback. """ @abstractmethod - def init(self, features): + def init(self, all_data): """ - Initializes the store with a set of feature flags. Meant to be called by the UpdateProcessor + Initializes the store with a set of objects. Meant to be called by the UpdateProcessor - :param features: The features and their data as provided by LD - :type features: dict[str, dict] + :param all_data: The features and their data as provided by LD + :type all_data: dict[VersionedDataKind, dict[str, dict]] """ @abstractmethod - def delete(self, key, version): + def delete(self, kind, key, version): """ - Marks a feature flag as deleted + Marks an object as deleted - :param key: The feature flag key + :param kind: Denotes which collection to access - one of the constants in versioned_data_kind + :type kind: VersionedDataKind + :param key: The object key :type key: str - :param version: The version of the flag to mark as deleted - :type version: str + :param version: The version of the object to mark as deleted + :type version: int """ @abstractmethod - def upsert(self, key, feature): + def upsert(self, kind, item): """ - Inserts a feature flag if its version is newer or missing + Inserts an object if its version is newer or missing - :param key: The feature flag - :type key: str - :param feature: The feature information + :param kind: Denotes which collection to access - one of the constants in versioned_data_kind + :type kind: VersionedDataKind + :param item: The object to be inserted or updated - must have key and version properties - :type feature: dict + :type item: dict """
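For anyone maintaining a third-party store, the contract change is mechanical: every method except `initialized` gains a leading `kind`, `init()` takes the two-level dict, and `upsert()` pulls the key from the item. A compact skeleton of a conforming store (illustrative only; it omits the locking a real implementation needs):

```python
# Skeleton of a store conforming to the updated FeatureStore interface.
# Not thread-safe; a real store needs locking like InMemoryFeatureStore's.
from collections import defaultdict
from ldclient.interfaces import FeatureStore

class DictFeatureStore(FeatureStore):
    def __init__(self):
        self._data = defaultdict(dict)
        self._init = False

    def get(self, kind, key, callback):
        item = self._data[kind].get(key)
        # tombstoned or missing items are reported as None
        return callback(None if item is None or item.get('deleted', False) else item)

    def all(self, kind, callback):
        return callback(dict((k, v) for k, v in self._data[kind].items()
                             if not v.get('deleted', False)))

    def init(self, all_data):
        self._data.clear()
        self._data.update(all_data)
        self._init = True

    def delete(self, kind, key, version):
        item = self._data[kind].get(key)
        if item is None or item['version'] < version:
            self._data[kind][key] = {'deleted': True, 'version': version}

    def upsert(self, kind, item):
        old = self._data[kind].get(item['key'])
        if old is None or old['version'] < item['version']:
            self._data[kind][item['key']] = item

    @property
    def initialized(self):
        return self._init
```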
diff --git a/ldclient/polling.py b/ldclient/polling.py index 3e6bec4a..4b71f668 100644 --- a/ldclient/polling.py +++ b/ldclient/polling.py @@ -23,7 +23,8 @@ def run(self): while self._running: start_time = time.time() try: - self._store.init(self._requester.get_all()) + all_data = self._requester.get_all_data() + self._store.init(all_data) if not self._ready.is_set() is True and self._store.initialized is True: log.info("PollingUpdateProcessor initialized ok") self._ready.set() diff --git a/ldclient/redis_feature_store.py b/ldclient/redis_feature_store.py index 111811dd..f3850cbe 100644 --- a/ldclient/redis_feature_store.py +++ b/ldclient/redis_feature_store.py @@ -7,6 +7,7 @@ from ldclient.expiringdict import ExpiringDict from ldclient.interfaces import FeatureStore from ldclient.memoized_value import MemoizedValue +from ldclient.versioned_data_kind import FEATURES class ForgetfulDict(dict): @@ -22,92 +23,103 @@ def __init__(self, expiration=15, capacity=1000): - self._features_key = "{0}:features".format(prefix) + self._prefix = prefix self._cache = ForgetfulDict() if expiration == 0 else ExpiringDict(max_len=capacity, max_age_seconds=expiration) self._pool = redis.ConnectionPool.from_url(url=url, max_connections=max_connections) self._inited = MemoizedValue(lambda: self._query_init()) log.info("Started RedisFeatureStore connected to URL: " + url + " using prefix: " + prefix) - def init(self, features): - pipe = redis.Redis(connection_pool=self._pool).pipeline() - pipe.delete(self._features_key) + def _items_key(self, kind): + return "{0}:{1}".format(self._prefix, kind.namespace) - self._cache.clear() + def _cache_key(self, kind, key): + return "{0}:{1}".format(kind.namespace, key) - for k, f in features.items(): - f_json = json.dumps(f) - pipe.hset(self._features_key, k, f_json) - self._cache[k] = f - pipe.execute() - log.info("Initialized RedisFeatureStore with " + str(len(features)) + " feature flags") + def init(self, all_data): + pipe = redis.Redis(connection_pool=self._pool).pipeline() + + self._cache.clear() + all_count = 0 + + for kind, items in all_data.items(): + base_key = self._items_key(kind) + pipe.delete(base_key) + for key, item in items.items(): + item_json = 
json.dumps(item) + pipe.hset(base_key, key, item_json) + self._cache[self._cache_key(kind, key)] = item + all_count = all_count + len(items) + try: + pipe.execute() + except: + self._cache.clear() + raise + log.info("Initialized RedisFeatureStore with %d items", all_count) self._inited.set(True) - def all(self, callback): + def all(self, kind, callback): r = redis.Redis(connection_pool=self._pool) try: - all_features = r.hgetall(self._features_key) + all_items = r.hgetall(self._items_key(kind)) except BaseException as e: - log.error("RedisFeatureStore: Could not retrieve all flags from Redis with error: " - + e.message + " Returning None") + log.error("RedisFeatureStore: Could not retrieve '%s' from Redis with error: %s. Returning None.", + kind.namespace, e.message) return callback(None) - if all_features is None or all_features is "": - log.warn("RedisFeatureStore: call to get all flags returned no results. Returning None.") + if all_items is None or all_items is "": + log.warn("RedisFeatureStore: call to get all '%s' returned no results. Returning None.", kind.namespace) return callback(None) results = {} - for k, f_json in all_features.items() or {}: - f = json.loads(f_json.decode('utf-8')) - if 'deleted' in f and f['deleted'] is False: - results[f['key']] = f + for key, item_json in all_items.items(): + item = json.loads(item_json.decode('utf-8')) + if item.get('deleted', False) is False: + results[key] = item return callback(results) - def get(self, key, callback=lambda x: x): - f = self._get_even_if_deleted(key) - if f is not None: - if f.get('deleted', False) is True: - log.debug("RedisFeatureStore: get returned deleted flag from Redis. Returning None.") - return callback(None) - return callback(f) - - def _get_even_if_deleted(self, key): - f = self._cache.get(key) - if f is not None: + def get(self, kind, key, callback=lambda x: x): + item = self._get_even_if_deleted(kind, key) + if item is not None and item.get('deleted', False) is True: + log.debug("RedisFeatureStore: get returned deleted item %s in '%s'. Returning None.", key, kind.namespace) + return callback(None) + return callback(item) + + def _get_even_if_deleted(self, kind, key): + cacheKey = self._cache_key(kind, key) + item = self._cache.get(cacheKey) + if item is not None: # reset ttl - self._cache[key] = f - return f + self._cache[cacheKey] = item + return item try: r = redis.Redis(connection_pool=self._pool) - f_json = r.hget(self._features_key, key) + item_json = r.hget(self._items_key(kind), key) except BaseException as e: - log.error("RedisFeatureStore: Could not retrieve flag from redis with error: " + e.message - + ". Returning None for key: " + key) + log.error("RedisFeatureStore: Could not retrieve key %s from '%s' with error: %s", + key, kind.namespace, e.message) return None - if f_json is None or f_json is "": - log.debug("RedisFeatureStore: feature flag with key: " + key + " not found in Redis. Returning None.") + if item_json is None or item_json is "": + log.debug("RedisFeatureStore: key %s not found in '%s'. 
Returning None.", key, kind.namespace) return None - f = json.loads(f_json.decode('utf-8')) - self._cache[key] = f - return f + item = json.loads(item_json.decode('utf-8')) + self._cache[cacheKey] = item + return item - def delete(self, key, version): + def delete(self, kind, key, version): r = redis.Redis(connection_pool=self._pool) - r.watch(self._features_key) - f_json = r.hget(self._features_key, key) - if f_json: - f = json.loads(f_json.decode('utf-8')) - if f is not None and f['version'] < version: - f['deleted'] = True - f['version'] = version - elif f is None: - f = {'deleted': True, 'version': version} - f_json = json.dumps(f) - r.hset(self._features_key, key, f_json) - self._cache[key] = f + baseKey = self._items_key(kind) + r.watch(baseKey) + item_json = r.hget(baseKey, key) + item = None if item_json is None else json.loads(item_json.decode('utf-8')) + if item is None or item['version'] < version: + deletedItem = { "deleted": True, "version": version } + item_json = json.dumps(deletedItem) + r.hset(baseKey, key, item_json) + self._cache[self._cache_key(kind, key)] = deletedItem r.unwatch() @property @@ -116,18 +128,20 @@ def initialized(self): def _query_init(self): r = redis.Redis(connection_pool=self._pool) - return r.exists(self._features_key) + return r.exists(self._items_key(FEATURES)) - def upsert(self, key, feature): + def upsert(self, kind, item): r = redis.Redis(connection_pool=self._pool) - r.watch(self._features_key) - old = self._get_even_if_deleted(key) + baseKey = self._items_key(kind) + key = item['key'] + r.watch(baseKey) + old = self._get_even_if_deleted(kind, key) if old: - if old['version'] >= feature['version']: + if old['version'] >= item['version']: r.unwatch() return - feature_json = json.dumps(feature) - r.hset(self._features_key, key, feature_json) - self._cache[key] = feature + item_json = json.dumps(item) + r.hset(baseKey, key, item_json) + self._cache[self._cache_key(kind, key)] = item r.unwatch() diff --git a/ldclient/streaming.py b/ldclient/streaming.py index 0f6a29f3..55957405 100644 --- a/ldclient/streaming.py +++ b/ldclient/streaming.py @@ -1,4 +1,5 @@ from __future__ import absolute_import +from collections import namedtuple import json from threading import Thread @@ -10,17 +11,22 @@ from ldclient.interfaces import UpdateProcessor from ldclient.sse_client import SSEClient from ldclient.util import _stream_headers, log +from ldclient.versioned_data_kind import FEATURES, SEGMENTS # allows for up to 5 minutes to elapse without any data sent across the stream. 
diff --git a/ldclient/streaming.py b/ldclient/streaming.py index 0f6a29f3..55957405 100644 --- a/ldclient/streaming.py +++ b/ldclient/streaming.py @@ -1,4 +1,5 @@ from __future__ import absolute_import +from collections import namedtuple import json from threading import Thread @@ -10,17 +11,22 @@ from ldclient.interfaces import UpdateProcessor from ldclient.sse_client import SSEClient from ldclient.util import _stream_headers, log +from ldclient.versioned_data_kind import FEATURES, SEGMENTS # allows for up to 5 minutes to elapse without any data sent across the stream. The heartbeats sent as comments on the # stream will keep this from triggering stream_read_timeout = 5 * 60 +STREAM_ALL_PATH = '/all' + +ParsedPath = namedtuple('ParsedPath', ['kind', 'key']) + class StreamingUpdateProcessor(Thread, UpdateProcessor): def __init__(self, config, requester, store, ready): Thread.__init__(self) self.daemon = True - self._uri = config.stream_uri + self._uri = config.stream_base_uri + STREAM_ALL_PATH self._config = config self._requester = requester self._store = store @@ -83,34 +89,54 @@ def initialized(self): @staticmethod def process_message(store, requester, msg): if msg.event == 'put': - flags = json.loads(msg.data) - versions_summary = list(map(lambda f: "{0}:{1}".format(f.get("key"), f.get("version")), flags.values())) - log.debug("Received put event with {0} flags and versions: {1}".format(len(flags), versions_summary)) - store.init(flags) + all_data = json.loads(msg.data) + init_data = { + FEATURES: all_data['data']['flags'], + SEGMENTS: all_data['data']['segments'] + } + log.debug("Received put event with %d flags and %d segments", + len(init_data[FEATURES]), len(init_data[SEGMENTS])) + store.init(init_data) return True elif msg.event == 'patch': payload = json.loads(msg.data) - key = payload['path'][1:] - flag = payload['data'] - log.debug("Received patch event for flag key: [{0}] New version: [{1}]" .format(flag.get("key"), str(flag.get("version")))) - store.upsert(key, flag) + path = payload['path'] + obj = payload['data'] + log.debug("Received patch event for %s, New version: [%d]", path, obj.get("version")) + target = _parse_path(path) + if target is not None: + store.upsert(target.kind, obj) + else: + log.warning("Patch for unknown path: %s", path) elif msg.event == "indirect/patch": - key = msg.data - log.debug("Received indirect/patch event for flag key: " + key) - store.upsert(key, requester.get_one(key)) + path = msg.data + log.debug("Received indirect/patch event for %s", path) + target = _parse_path(path) + if target is not None: + store.upsert(target.kind, requester.get_one(target.kind, target.key)) + else: + log.warning("Indirect patch for unknown path: %s", path) elif msg.event == "indirect/put": log.debug("Received indirect/put event") - store.init(requester.get_all()) + store.init(requester.get_all_data()) return True elif msg.event == 'delete': payload = json.loads(msg.data) - key = payload['path'][1:] + path = payload['path'] # noinspection PyShadowingNames version = payload['version'] - log.debug("Received delete event for flag key: [{0}] New version: [{1}]" .format(key, version)) + log.debug("Received delete event for %s, New version: [%d]", path, version) + target = _parse_path(path) + if target is not None: + store.delete(target.kind, target.key, version) + else: + log.warning("Delete for unknown path: %s", path) else: log.warning('Unhandled event in stream processor: ' + msg.event) return False + + +def _parse_path(path): + for kind in [FEATURES, SEGMENTS]: + if path.startswith(kind.stream_api_path): + return ParsedPath(kind = kind, key = path[len(kind.stream_api_path):]) + return None
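With flags and segments multiplexed onto the single `/all` stream, patch and delete events now carry paths like `/flags/<key>` and `/segments/<key>`, which the module-level `_parse_path` helper maps back to a data kind. A tiny sketch (internal helper, subject to change):

```python
# Sketch: how stream event paths resolve to (kind, key) pairs.
from ldclient.streaming import _parse_path
from ldclient.versioned_data_kind import FEATURES, SEGMENTS

target = _parse_path('/flags/my-flag')
assert target.kind is FEATURES and target.key == 'my-flag'

target = _parse_path('/segments/beta-users')
assert target.kind is SEGMENTS and target.key == 'beta-users'

# unrecognized paths yield None; process_message logs a warning and skips them
assert _parse_path('/other/x') is None
```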
diff --git a/ldclient/twisted_client.py b/ldclient/twisted_client.py deleted file mode 100644 index 90ce50dc..00000000 --- a/ldclient/twisted_client.py +++ /dev/null @@ -1,80 +0,0 @@ -from functools import partial - -from twisted.internet import defer -from twisted.internet.defer import DeferredList - -from ldclient import LDClient -from ldclient import log -from ldclient.flag import _get_variation, _evaluate_index, _get_off_variation - - -class TwistedLDClient(LDClient): - @defer.inlineCallbacks - def _evaluate_and_send_events(self, flag, user, default): - value = yield self._evaluate(flag, user) - if value is None: - value = default - log.info("value: " + str(value)) - self._send_event({'kind': 'feature', 'key': flag.get('key'), 'user': user, 'value': value, - 'default': default, 'version': flag.get('version')}) - defer.returnValue(value) - - def _evaluate(self, flag, user): - if flag.get('on', False): - def cb(result): - if result is not None: - return result - return _get_off_variation(flag) - - value = self._evaluate_internal(flag, user) - value.addBoth(cb) - return value - - return _get_off_variation(flag) - - def _evaluate_internal(self, flag, user): - def check_prereq_results(result): - prereq_ok = True - for (success, prereq_ok) in result: - if success is False or prereq_ok is False: - prereq_ok = False - - if prereq_ok is True: - index = _evaluate_index(flag, user) - variation = _get_variation(flag, index) - return variation - return None - - results = DeferredList(map(partial(self._evaluate_prereq, user), flag.get('prerequisites') or [])) - results.addBoth(check_prereq_results) - return results - - # returns False if the prereq failed or there was an error evaluating it. Otherwise returns True - def _evaluate_prereq(self, user, prereq): - - @defer.inlineCallbacks - def eval_prereq(prereq_flag): - if prereq_flag is None: - log.warn("Missing prereq flag: " + prereq.get('key')) - defer.returnValue(False) - if prereq_flag.get('on', False) is True: - prereq_value = yield self._evaluate_internal(prereq_flag, user) - variation = _get_variation(prereq_flag, prereq.get('variation')) - if prereq_value is None or not prereq_value == variation: - ok = False - else: - ok = True - else: - ok = False - defer.returnValue(ok) - - result = self._store.get(prereq.get('key'), eval_prereq) - return result - - @defer.inlineCallbacks - def _evaluate_multi(self, user, flags): - results = {} - for k, v in flags.items() or {}: - r = yield self._evaluate(v, user) - results[k] = r - defer.returnValue(results) diff --git a/ldclient/twisted_event_consumer.py b/ldclient/twisted_event_consumer.py deleted file mode 100644 index e2f69266..00000000 --- a/ldclient/twisted_event_consumer.py +++ /dev/null @@ -1,91 +0,0 @@ -from __future__ import absolute_import - -import errno -import json - -import txrequests -from cachecontrol import CacheControl -from queue import Empty -from requests.packages.urllib3.exceptions import ProtocolError -from twisted.internet import task, defer - -from ldclient.event_serializer import EventSerializer -from ldclient.interfaces import EventConsumer -from ldclient.util import _headers, log - - -class TwistedEventConsumer(EventConsumer): - - def __init__(self, queue, config): - self._queue = queue - """ :type: queue.Queue """ - - self._session = CacheControl(txrequests.Session()) - """ :type: txrequests.Session """ - - self._config = config - """ :type: ldclient.twisted.TwistedConfig """ - - self._serializer = EventSerializer(config) - - self._looping_call = None - """ :type: LoopingCall""" - - def start(self): - self._looping_call = task.LoopingCall(self._consume) - self._looping_call.start(5) - - def stop(self): - self._looping_call.stop() - - def is_alive(self): - return self._looping_call is not None and self._looping_call.running - - def flush(self): - return self._consume() - - def _consume(self): - items = [] - try: - while True: - items.append(self._queue.get_nowait())
- except Empty: - pass - - if items: - return self.send_batch(items) - - @defer.inlineCallbacks - def send_batch(self, events): - @defer.inlineCallbacks - def do_send(should_retry): - # noinspection PyBroadException - try: - json_body = self._serializer.serialize_events(events) - hdrs = _headers(self._config.sdk_key) - r = yield self._session.post(self._config.events_uri, - headers=hdrs, - timeout=(self._config.connect_timeout, self._config.read_timeout), - data=json_body) - if r.status_code == 401: - log.error('Received 401 error, no further events will be posted since SDK key is invalid') - self.stop() - return - r.raise_for_status() - except ProtocolError as e: - inner = e.args[1] - if inner.errno == errno.ECONNRESET and should_retry: - log.warning( 'ProtocolError exception caught while sending events. Retrying.') - yield do_send(False) - else: - log.exception( 'Unhandled exception in event consumer. Analytics events were not processed.') - except: - log.exception( 'Unhandled exception in event consumer. Analytics events were not processed.') - try: - yield do_send(True) - finally: - for _ in events: - self._queue.task_done()
Returning None.") - defer.returnValue(None) - - results = {} - for k, f_json in all_features.items() or {}: - f = json.loads(f_json.decode('utf-8')) - if 'deleted' in f and f['deleted'] is False: - results[f['key']] = f - defer.returnValue(results) - except Exception as e: - log.error("Could not connect to Redis using url: " + self._url + " with error message: " + e.message) - defer.returnValue(None) - finally: - if r: - r.quit() - defer.returnValue(None) - - all_flags = redis_get_all() - all_flags.addBoth(callback) - return all_flags - - def delete(self, key, version): - raise NotImplementedError() - - def init(self, features): - raise NotImplementedError() - - def get(self, key, callback): - @defer.inlineCallbacks - def redis_get(): - r = None - try: - r = yield self._get_connection() - """ :type: RedisClient """ - get_result = yield r.hget(self._features_key, key) - if not get_result: - log.warn("Didn't get response from redis for key: " + key + " Returning None.") - defer.returnValue(None) - f_json = get_result.get(key) - if f_json is None or f_json is "": - log.warn( - "TwistedRedisFeatureStore: feature flag with key: " + key + " not found in Redis. Returning None.") - defer.returnValue(None) - - f = json.loads(f_json.decode('utf-8')) - if f.get('deleted', False) is True: - log.warn("TwistedRedisFeatureStore: get returned deleted flag from Redis. Returning None.") - defer.returnValue(None) - self._cache[key] = f - defer.returnValue(f) - except Exception as e: - log.error("Could not connect to Redis using url: " + self._url + " with error message: " + e.message) - defer.returnValue(None) - finally: - if r: - r.quit() - defer.returnValue(None) - - cached = self._cache.get(key) - if cached is not None: - # reset ttl - self._cache[key] = cached - return callback(cached) - - f = redis_get() - f.addBoth(callback) - return f diff --git a/ldclient/versioned_data_kind.py b/ldclient/versioned_data_kind.py new file mode 100644 index 00000000..6df96a32 --- /dev/null +++ b/ldclient/versioned_data_kind.py @@ -0,0 +1,19 @@ +from collections import namedtuple + +""" +These objects denote the types of data that can be stored in the feature store and +referenced in the API. If we add another storable data type in the future, as long as it +follows the same pattern (having "key", "version", and "deleted" properties), we only need +to add a corresponding constant here and the existing store should be able to handle it. 
+""" + +VersionedDataKind = namedtuple('VersionedDataKind', + ['namespace', 'request_api_path', 'stream_api_path']) + +FEATURES = VersionedDataKind(namespace = "features", + request_api_path = "/sdk/latest-flags", + stream_api_path = "/flags/") + +SEGMENTS = VersionedDataKind(namespace = "segments", + request_api_path = "/sdk/latest-segments", + stream_api_path = "/segments/") diff --git a/setup.py b/setup.py index 79856397..f7549f5c 100644 --- a/setup.py +++ b/setup.py @@ -14,8 +14,6 @@ install_reqs = parse_requirements('requirements.txt', session=uuid.uuid1()) python26_reqs = parse_requirements('python2.6-requirements.txt', session=uuid.uuid1()) test_reqs = parse_requirements('test-requirements.txt', session=uuid.uuid1()) -twisted_reqs = parse_requirements( - 'twisted-requirements.txt', session=uuid.uuid1()) redis_reqs = parse_requirements('redis-requirements.txt', session=uuid.uuid1()) # reqs is a list of requirement @@ -23,7 +21,6 @@ reqs = [str(ir.req) for ir in install_reqs] python26reqs = [str(ir.req) for ir in python26_reqs] testreqs = [str(ir.req) for ir in test_reqs] -txreqs = [str(ir.req) for ir in twisted_reqs] redisreqs = [str(ir.req) for ir in redis_reqs] @@ -66,7 +63,6 @@ def run(self): 'Topic :: Software Development :: Libraries', ], extras_require={ - "twisted": txreqs, "redis": redisreqs, "python2.6": python26reqs }, diff --git a/testing/test_feature_store.py b/testing/test_feature_store.py index 58656912..cabc40df 100644 --- a/testing/test_feature_store.py +++ b/testing/test_feature_store.py @@ -3,6 +3,7 @@ from ldclient.feature_store import InMemoryFeatureStore from ldclient.redis_feature_store import RedisFeatureStore +from ldclient.versioned_data_kind import FEATURES class TestFeatureStore: @@ -51,14 +52,13 @@ def make_feature(key, ver): def base_initialized_store(self, store): store.init({ - 'foo': self.make_feature('foo', 10), - 'bar': self.make_feature('bar', 10), + FEATURES: { + 'foo': self.make_feature('foo', 10), + 'bar': self.make_feature('bar', 10), + } }) return store - def test_not_initially_initialized(self, store): - assert store.initialized is False - def test_initialized(self, store): store = self.base_initialized_store(store) assert store.initialized is True @@ -66,50 +66,57 @@ def test_initialized(self, store): def test_get_existing_feature(self, store): store = self.base_initialized_store(store) expected = self.make_feature('foo', 10) - assert store.get('foo', lambda x: x) == expected + assert store.get(FEATURES, 'foo', lambda x: x) == expected def test_get_nonexisting_feature(self, store): store = self.base_initialized_store(store) - assert store.get('biz', lambda x: x) is None + assert store.get(FEATURES, 'biz', lambda x: x) is None + + def test_get_all_versions(self, store): + store = self.base_initialized_store(store) + result = store.all(FEATURES, lambda x: x) + assert len(result) is 2 + assert result.get('foo') == self.make_feature('foo', 10) + assert result.get('bar') == self.make_feature('bar', 10) def test_upsert_with_newer_version(self, store): store = self.base_initialized_store(store) new_ver = self.make_feature('foo', 11) - store.upsert('foo', new_ver) - assert store.get('foo', lambda x: x) == new_ver + store.upsert(FEATURES, new_ver) + assert store.get(FEATURES, 'foo', lambda x: x) == new_ver def test_upsert_with_older_version(self, store): store = self.base_initialized_store(store) new_ver = self.make_feature('foo', 9) expected = self.make_feature('foo', 10) - store.upsert('foo', new_ver) - assert store.get('foo', lambda x: x) == 
diff --git a/setup.py b/setup.py index 79856397..f7549f5c 100644 --- a/setup.py +++ b/setup.py @@ -14,8 +14,6 @@ install_reqs = parse_requirements('requirements.txt', session=uuid.uuid1()) python26_reqs = parse_requirements('python2.6-requirements.txt', session=uuid.uuid1()) test_reqs = parse_requirements('test-requirements.txt', session=uuid.uuid1()) -twisted_reqs = parse_requirements( 'twisted-requirements.txt', session=uuid.uuid1()) redis_reqs = parse_requirements('redis-requirements.txt', session=uuid.uuid1()) # reqs is a list of requirement @@ -23,7 +21,6 @@ reqs = [str(ir.req) for ir in install_reqs] python26reqs = [str(ir.req) for ir in python26_reqs] testreqs = [str(ir.req) for ir in test_reqs] -txreqs = [str(ir.req) for ir in twisted_reqs] redisreqs = [str(ir.req) for ir in redis_reqs] @@ -66,7 +63,6 @@ def run(self): 'Topic :: Software Development :: Libraries', ], extras_require={ - "twisted": txreqs, "redis": redisreqs, "python2.6": python26reqs }, diff --git a/testing/test_feature_store.py b/testing/test_feature_store.py index 58656912..cabc40df 100644 --- a/testing/test_feature_store.py +++ b/testing/test_feature_store.py @@ -3,6 +3,7 @@ from ldclient.feature_store import InMemoryFeatureStore from ldclient.redis_feature_store import RedisFeatureStore +from ldclient.versioned_data_kind import FEATURES class TestFeatureStore: @@ -51,14 +52,13 @@ def make_feature(key, ver): def base_initialized_store(self, store): store.init({ - 'foo': self.make_feature('foo', 10), - 'bar': self.make_feature('bar', 10), + FEATURES: { + 'foo': self.make_feature('foo', 10), + 'bar': self.make_feature('bar', 10), + } }) return store - def test_not_initially_initialized(self, store): - assert store.initialized is False - def test_initialized(self, store): store = self.base_initialized_store(store) assert store.initialized is True @@ -66,50 +66,57 @@ def test_initialized(self, store): def test_get_existing_feature(self, store): store = self.base_initialized_store(store) expected = self.make_feature('foo', 10) - assert store.get('foo', lambda x: x) == expected + assert store.get(FEATURES, 'foo', lambda x: x) == expected def test_get_nonexisting_feature(self, store): store = self.base_initialized_store(store) - assert store.get('biz', lambda x: x) is None + assert store.get(FEATURES, 'biz', lambda x: x) is None + + def test_get_all_versions(self, store): + store = self.base_initialized_store(store) + result = store.all(FEATURES, lambda x: x) + assert len(result) == 2 + assert result.get('foo') == self.make_feature('foo', 10) + assert result.get('bar') == self.make_feature('bar', 10) def test_upsert_with_newer_version(self, store): store = self.base_initialized_store(store) new_ver = self.make_feature('foo', 11) - store.upsert('foo', new_ver) - assert store.get('foo', lambda x: x) == new_ver + store.upsert(FEATURES, new_ver) + assert store.get(FEATURES, 'foo', lambda x: x) == new_ver def test_upsert_with_older_version(self, store): store = self.base_initialized_store(store) new_ver = self.make_feature('foo', 9) expected = self.make_feature('foo', 10) - store.upsert('foo', new_ver) - assert store.get('foo', lambda x: x) == expected + store.upsert(FEATURES, new_ver) + assert store.get(FEATURES, 'foo', lambda x: x) == expected def test_upsert_with_new_feature(self, store): store = self.base_initialized_store(store) new_ver = self.make_feature('biz', 1) - store.upsert('biz', new_ver) - assert store.get('biz', lambda x: x) == new_ver + store.upsert(FEATURES, new_ver) + assert store.get(FEATURES, 'biz', lambda x: x) == new_ver def test_delete_with_newer_version(self, store): store = self.base_initialized_store(store) - store.delete('foo', 11) - assert store.get('foo', lambda x: x) is None + store.delete(FEATURES, 'foo', 11) + assert store.get(FEATURES, 'foo', lambda x: x) is None def test_delete_unknown_feature(self, store): store = self.base_initialized_store(store) - store.delete('biz', 11) - assert store.get('biz', lambda x: x) is None + store.delete(FEATURES, 'biz', 11) + assert store.get(FEATURES, 'biz', lambda x: x) is None def test_delete_with_older_version(self, store): store = self.base_initialized_store(store) - store.delete('foo', 9) + store.delete(FEATURES, 'foo', 9) expected = self.make_feature('foo', 10) - assert store.get('foo', lambda x: x) == expected + assert store.get(FEATURES, 'foo', lambda x: x) == expected def test_upsert_older_version_after_delete(self, store): store = self.base_initialized_store(store) - store.delete('foo', 11) + store.delete(FEATURES, 'foo', 11) old_ver = self.make_feature('foo', 9) - store.upsert('foo', old_ver) - assert store.get('foo', lambda x: x) is None + store.upsert(FEATURES, old_ver) + assert store.get(FEATURES, 'foo', lambda x: x) is None diff --git a/testing/test_flag.py b/testing/test_flag.py index b17a0152..8b9740aa 100644 --- a/testing/test_flag.py +++ b/testing/test_flag.py @@ -1,6 +1,7 @@ import pytest from ldclient.feature_store import InMemoryFeatureStore from ldclient.flag import _bucket_user, evaluate +from ldclient.versioned_data_kind import FEATURES, SEGMENTS empty_store = InMemoryFeatureStore() @@ -57,7 +58,7 @@ def test_flag_returns_off_variation_and_event_if_prerequisite_is_not_met(): 'variations': ['d', 'e'], 'version': 2 } - store.upsert('feature1', flag1) + store.upsert(FEATURES, flag1) user = { 'key': 'x' } events_should_be = [{'kind': 'feature', 'key': 'feature1', 'value': 'd', 'version': 2, 'user': user, 'prereqOf': 'feature0'}] @@ -81,7 +82,7 @@ def test_flag_returns_fallthrough_and_event_if_prereq_is_met_and_there_are_no_ru 'variations': ['d', 'e'], 'version': 2 } - store.upsert('feature1', flag1) + store.upsert(FEATURES, flag1) user = { 'key': 'x' } events_should_be = [{'kind': 'feature', 'key': 'feature1', 'value': 'e', 'version': 2, 'user': user, 'prereqOf': 'feature0'}] @@ -122,6 +123,60 @@ def test_flag_matches_user_from_rules(): user = { 'key': 'userkey' } assert evaluate(flag, user, empty_store) == ('c', []) +def test_segment_match_clause_retrieves_segment_from_store(): + store = InMemoryFeatureStore() + segment = { + "key": "segkey", + "included": [ "foo" ], + "version": 1 + } + store.upsert(SEGMENTS, segment) + + user = { "key": "foo" } + flag = { + "key": "test", + "variations": [ False, True ], + "fallthrough": { "variation": 0 }, + "on": True, + "rules": [ + { + "clauses": [ + { + "attribute": "", + "op": "segmentMatch", + "values": [ "segkey" ] + } + ], + "variation": 1 + } + ] + } + + assert evaluate(flag, user, store) == (True, []) + +def test_segment_match_clause_falls_through_with_no_errors_if_segment_not_found(): + user = { "key": "foo" } + flag = { + "key": "test", + "variations": [ False, True ], + "fallthrough": {
"variation": 0 }, + "on": True, + "rules": [ + { + "clauses": [ + { + "attribute": "", + "op": "segmentMatch", + "values": [ "segkey" ] + } + ], + "variation": 1 + } + ] + } + + assert evaluate(flag, user, empty_store) == (False, []) + def test_clause_matches_builtin_attribute(): clause = { 'attribute': 'name', @@ -181,18 +236,16 @@ def _make_bool_flag_from_clause(clause): def test_bucket_by_user_key(): - feature = { u'key': u'hashKey', u'salt': u'saltyA' } - user = { u'key': u'userKeyA' } - bucket = _bucket_user(user, feature, 'key') + bucket = _bucket_user(user, 'hashKey', 'saltyA', 'key') assert bucket == pytest.approx(0.42157587) user = { u'key': u'userKeyB' } - bucket = _bucket_user(user, feature, 'key') + bucket = _bucket_user(user, 'hashKey', 'saltyA', 'key') assert bucket == pytest.approx(0.6708485) user = { u'key': u'userKeyC' } - bucket = _bucket_user(user, feature, 'key') + bucket = _bucket_user(user, 'hashKey', 'saltyA', 'key') assert bucket == pytest.approx(0.10343106) def test_bucket_by_int_attr(): @@ -204,9 +257,9 @@ def test_bucket_by_int_attr(): u'stringAttr': u'33333' } } - bucket = _bucket_user(user, feature, 'intAttr') + bucket = _bucket_user(user, 'hashKey', 'saltyA', 'intAttr') assert bucket == pytest.approx(0.54771423) - bucket2 = _bucket_user(user, feature, 'stringAttr') + bucket2 = _bucket_user(user, 'hashKey', 'saltyA', 'stringAttr') assert bucket2 == bucket def test_bucket_by_float_attr_not_allowed(): @@ -217,5 +270,5 @@ def test_bucket_by_float_attr_not_allowed(): u'floatAttr': 33.5 } } - bucket = _bucket_user(user, feature, 'floatAttr') + bucket = _bucket_user(user, 'hashKey', 'saltyA', 'floatAttr') assert bucket == 0.0 diff --git a/testing/test_segment.py b/testing/test_segment.py new file mode 100644 index 00000000..02b9ecfa --- /dev/null +++ b/testing/test_segment.py @@ -0,0 +1,155 @@ +import pytest + +from ldclient.flag import _segment_matches_user + + +def test_explicit_include_user(): + s = { + "key": "test", + "included": [ "foo" ], + "version": 1 + } + u = { "key": "foo" } + assert _segment_matches_user(s, u) is True + +def test_explicit_exclude_user(): + s = { + "key": "test", + "excluded": [ "foo" ], + "version": 1 + } + u = { "key": "foo" } + assert _segment_matches_user(s, u) is False + +def test_explicit_include_has_precedence(): + s = { + "key": "test", + "included": [ "foo" ], + "excluded": [ "foo" ], + "version": 1 + } + u = { "key": "foo" } + assert _segment_matches_user(s, u) is True + +def test_matching_rule_with_no_weight(): + s = { + "key": "test", + "rules": [ + { + "clauses": [ + { + "attribute": "email", + "op": "in", + "values": [ "test@example.com" ] + } + ] + } + ] + } + u = { "key": "foo", "email": "test@example.com" } + assert _segment_matches_user(s, u) is True + +def test_matching_rule_with_none_weight(): + s = { + "key": "test", + "rules": [ + { + "clauses": [ + { + "attribute": "email", + "op": "in", + "values": [ "test@example.com" ] + } + ], + "weight": None + } + ] + } + u = { "key": "foo", "email": "test@example.com" } + assert _segment_matches_user(s, u) is True + +def test_matching_rule_with_full_rollout(): + s = { + "key": "test", + "rules": [ + { + "clauses": [ + { + "attribute": "email", + "op": "in", + "values": [ "test@example.com" ] + } + ], + "weight": 100000 + } + ] + } + u = { "key": "foo", "email": "test@example.com" } + assert _segment_matches_user(s, u) is True + +def test_matching_rule_with_zero_rollout(): + s = { + "key": "test", + "rules": [ + { + "clauses": [ + { + "attribute": "email", + "op": "in", + 
"values": [ "test@example.com" ] + } + ], + "weight": 0 + } + ] + } + u = { "key": "foo", "email": "test@example.com" } + assert _segment_matches_user(s, u) is False + +def test_matching_rule_with_multiple_clauses(): + s = { + "key": "test", + "rules": [ + { + "clauses": [ + { + "attribute": "email", + "op": "in", + "values": [ "test@example.com" ] + }, + { + "attribute": "name", + "op": "in", + "values": [ "bob" ] + } + ], + "weight": 100000 + } + ] + } + u = { "key": "foo", "email": "test@example.com", "name": "bob" } + assert _segment_matches_user(s, u) is True + +def test_non_matching_rule_with_multiple_clauses(): + s = { + "key": "test", + "rules": [ + { + "clauses": [ + { + "attribute": "email", + "op": "in", + "values": [ "test@example.com" ] + }, + { + "attribute": "name", + "op": "in", + "values": [ "bill" ] + } + ], + "weight": 100000 + } + ] + } + u = { "key": "foo", "email": "test@example.com", "name": "bob" } + assert _segment_matches_user(s, u) is False diff --git a/twisted-requirements.txt b/twisted-requirements.txt deleted file mode 100644 index e99d9e35..00000000 --- a/twisted-requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ -cryptography>=1.0 -pyOpenSSL>=0.14 -service_identity>=16.0 -txredis>=2.4 -txrequests>=0.9.2