From 96a1cc30737e13108d9a5211a6b998db1096e8fe Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 4 Oct 2018 19:17:42 -0700 Subject: [PATCH 001/190] add option to reduce front-end metadata for untracked flags --- ldclient/client.py | 10 +++-- ldclient/flags_state.py | 12 ++++-- testing/test_flags_state.py | 21 +++++----- testing/test_ldclient_evaluation.py | 59 +++++++++++++++++++++++++++-- 4 files changed, 81 insertions(+), 21 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index 683a5c3b..039fad52 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -296,7 +296,10 @@ def all_flags_state(self, user, **kwargs): :param kwargs: optional parameters affecting how the state is computed: set `client_side_only=True` to limit it to only flags that are marked for use with the client-side SDK (by default, all flags are included); set `with_reasons=True` to - include evaluation reasons in the state (see `variation_detail`) + include evaluation reasons in the state (see `variation_detail`); set + `details_only_for_tracked_flags=True` to omit any metadata that is normally only + used for event generation, such as flag versions and evaluation reasons, unless + the flag has event tracking or debugging turned on :return: a FeatureFlagsState object (will never be None; its 'valid' property will be False if the client is offline, has not been initialized, or the user is None or has no key) :rtype: FeatureFlagsState @@ -319,6 +322,7 @@ def all_flags_state(self, user, **kwargs): state = FeatureFlagsState(True) client_only = kwargs.get('client_side_only', False) with_reasons = kwargs.get('with_reasons', False) + details_only_if_tracked = kwargs.get('details_only_for_tracked_flags', False) try: flags_map = self._store.all(FEATURES, lambda x: x) if flags_map is None: @@ -333,12 +337,12 @@ def all_flags_state(self, user, **kwargs): try: detail = evaluate(flag, user, self._store, False).detail state.add_flag(flag, detail.value, detail.variation_index, - detail.reason if with_reasons else None) + detail.reason if with_reasons else None, details_only_if_tracked) except Exception as e: log.error("Error evaluating flag \"%s\" in all_flags_state: %s" % (key, e)) log.debug(traceback.format_exc()) reason = {'kind': 'ERROR', 'errorKind': 'EXCEPTION'} - state.add_flag(flag, None, None, reason if with_reasons else None) + state.add_flag(flag, None, None, reason if with_reasons else None, details_only_if_tracked) return state diff --git a/ldclient/flags_state.py b/ldclient/flags_state.py index c76b4908..cbfde1ec 100644 --- a/ldclient/flags_state.py +++ b/ldclient/flags_state.py @@ -12,15 +12,19 @@ def __init__(self, valid): self.__flag_metadata = {} self.__valid = valid - def add_flag(self, flag, value, variation, reason): + def add_flag(self, flag, value, variation, reason, details_only_if_tracked): """Used internally to build the state map.""" key = flag['key'] self.__flag_values[key] = value - meta = { 'version': flag.get('version'), 'trackEvents': flag.get('trackEvents') } + meta = {} + if (not details_only_if_tracked) or flag.get('trackEvents') or flag.get('debugEventsUntilDate'): + meta['version'] = flag.get('version') + if reason is not None: + meta['reason'] = reason if variation is not None: meta['variation'] = variation - if reason is not None: - meta['reason'] = reason + if flag.get('trackEvents'): + meta['trackEvents'] = True if flag.get('debugEventsUntilDate') is not None: meta['debugEventsUntilDate'] = flag.get('debugEventsUntilDate') self.__flag_metadata[key] = meta diff --git 
a/testing/test_flags_state.py b/testing/test_flags_state.py index 2fe5b123..45ea6404 100644 --- a/testing/test_flags_state.py +++ b/testing/test_flags_state.py @@ -6,7 +6,7 @@ def test_can_get_flag_value(): state = FeatureFlagsState(True) flag = { 'key': 'key' } - state.add_flag(flag, 'value', 1, None) + state.add_flag(flag, 'value', 1, None, False) assert state.get_flag_value('key') == 'value' def test_returns_none_for_unknown_flag(): @@ -17,16 +17,16 @@ def test_can_convert_to_values_map(): state = FeatureFlagsState(True) flag1 = { 'key': 'key1' } flag2 = { 'key': 'key2' } - state.add_flag(flag1, 'value1', 0, None) - state.add_flag(flag2, 'value2', 1, None) + state.add_flag(flag1, 'value1', 0, None, False) + state.add_flag(flag2, 'value2', 1, None, False) assert state.to_values_map() == { 'key1': 'value1', 'key2': 'value2' } def test_can_convert_to_json_dict(): state = FeatureFlagsState(True) flag1 = { 'key': 'key1', 'version': 100, 'offVariation': 0, 'variations': [ 'value1' ], 'trackEvents': False } flag2 = { 'key': 'key2', 'version': 200, 'offVariation': 1, 'variations': [ 'x', 'value2' ], 'trackEvents': True, 'debugEventsUntilDate': 1000 } - state.add_flag(flag1, 'value1', 0, None) - state.add_flag(flag2, 'value2', 1, None) + state.add_flag(flag1, 'value1', 0, None, False) + state.add_flag(flag2, 'value2', 1, None, False) result = state.to_json_dict() assert result == { @@ -35,8 +35,7 @@ def test_can_convert_to_json_dict(): '$flagsState': { 'key1': { 'variation': 0, - 'version': 100, - 'trackEvents': False + 'version': 100 }, 'key2': { 'variation': 1, @@ -52,8 +51,8 @@ def test_can_convert_to_json_string(): state = FeatureFlagsState(True) flag1 = { 'key': 'key1', 'version': 100, 'offVariation': 0, 'variations': [ 'value1' ], 'trackEvents': False } flag2 = { 'key': 'key2', 'version': 200, 'offVariation': 1, 'variations': [ 'x', 'value2' ], 'trackEvents': True, 'debugEventsUntilDate': 1000 } - state.add_flag(flag1, 'value1', 0, None) - state.add_flag(flag2, 'value2', 1, None) + state.add_flag(flag1, 'value1', 0, None, False) + state.add_flag(flag2, 'value2', 1, None, False) obj = state.to_json_dict() str = state.to_json_string() @@ -63,8 +62,8 @@ def test_can_serialize_with_jsonpickle(): state = FeatureFlagsState(True) flag1 = { 'key': 'key1', 'version': 100, 'offVariation': 0, 'variations': [ 'value1' ], 'trackEvents': False } flag2 = { 'key': 'key2', 'version': 200, 'offVariation': 1, 'variations': [ 'x', 'value2' ], 'trackEvents': True, 'debugEventsUntilDate': 1000 } - state.add_flag(flag1, 'value1', 0, None) - state.add_flag(flag2, 'value2', 1, None) + state.add_flag(flag1, 'value1', 0, None, False) + state.add_flag(flag2, 'value2', 1, None, False) obj = state.to_json_dict() str = jsonpickle.encode(state, unpicklable=False) diff --git a/testing/test_ldclient_evaluation.py b/testing/test_ldclient_evaluation.py index 9183034b..81719564 100644 --- a/testing/test_ldclient_evaluation.py +++ b/testing/test_ldclient_evaluation.py @@ -149,8 +149,7 @@ def test_all_flags_state_returns_state(): '$flagsState': { 'key1': { 'variation': 0, - 'version': 100, - 'trackEvents': False + 'version': 100 }, 'key2': { 'variation': 1, @@ -176,7 +175,6 @@ def test_all_flags_state_returns_state_with_reasons(): 'key1': { 'variation': 0, 'version': 100, - 'trackEvents': False, 'reason': {'kind': 'OFF'} }, 'key2': { @@ -229,6 +227,61 @@ def test_all_flags_state_can_be_filtered_for_client_side_flags(): values = state.to_values_map() assert values == { 'client-side-1': 'value1', 'client-side-2': 'value2' } +def 
test_all_flags_state_can_omit_details_for_untracked_flags(): + flag1 = { + 'key': 'key1', + 'version': 100, + 'on': False, + 'offVariation': 0, + 'variations': [ 'value1' ], + 'trackEvents': False + } + flag2 = { + 'key': 'key2', + 'version': 200, + 'on': False, + 'offVariation': 1, + 'variations': [ 'x', 'value2' ], + 'trackEvents': True + } + flag3 = { + 'key': 'key3', + 'version': 300, + 'on': False, + 'offVariation': 1, + 'variations': [ 'x', 'value3' ], + 'debugEventsUntilDate': 1000 + } + store = InMemoryFeatureStore() + store.init({ FEATURES: { 'key1': flag1, 'key2': flag2, 'key3': flag3 } }) + client = make_client(store) + state = client.all_flags_state(user, with_reasons=True, details_only_for_tracked_flags=True) + assert state.valid == True + result = state.to_json_dict() + assert result == { + 'key1': 'value1', + 'key2': 'value2', + 'key3': 'value3', + '$flagsState': { + 'key1': { + 'variation': 0 + }, + 'key2': { + 'variation': 1, + 'version': 200, + 'trackEvents': True, + 'reason': {'kind': 'OFF'} + }, + 'key3': { + 'variation': 1, + 'version': 300, + 'debugEventsUntilDate': 1000, + 'reason': {'kind': 'OFF'} + } + }, + '$valid': True + } + def test_all_flags_state_returns_empty_state_if_user_is_none(): store = InMemoryFeatureStore() store.init({ FEATURES: { 'key1': flag1, 'key2': flag2 } }) From 89056fc7587ba16f9573e707c82be42af14a7b20 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 8 Oct 2018 16:33:39 -0700 Subject: [PATCH 002/190] fix logic for whether a flag is tracked in all_flags_state --- ldclient/flags_state.py | 8 +++++++- testing/test_ldclient_evaluation.py | 6 ++++-- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/ldclient/flags_state.py b/ldclient/flags_state.py index cbfde1ec..c5a8ab41 100644 --- a/ldclient/flags_state.py +++ b/ldclient/flags_state.py @@ -1,4 +1,5 @@ import json +import time class FeatureFlagsState(object): """ @@ -17,7 +18,12 @@ def add_flag(self, flag, value, variation, reason, details_only_if_tracked): key = flag['key'] self.__flag_values[key] = value meta = {} - if (not details_only_if_tracked) or flag.get('trackEvents') or flag.get('debugEventsUntilDate'): + with_details = (not details_only_if_tracked) or flag.get('trackEvents') + if not with_details: + if flag.get('debugEventsUntilDate'): + now = int(time.time() * 1000) + with_details = (flag.get('debugEventsUntilDate') > now) + if with_details: meta['version'] = flag.get('version') if reason is not None: meta['reason'] = reason diff --git a/testing/test_ldclient_evaluation.py b/testing/test_ldclient_evaluation.py index 81719564..46c48756 100644 --- a/testing/test_ldclient_evaluation.py +++ b/testing/test_ldclient_evaluation.py @@ -1,5 +1,6 @@ import pytest import json +import time from ldclient.client import LDClient, Config from ldclient.feature_store import InMemoryFeatureStore from ldclient.flag import EvaluationDetail @@ -228,6 +229,7 @@ def test_all_flags_state_can_be_filtered_for_client_side_flags(): assert values == { 'client-side-1': 'value1', 'client-side-2': 'value2' } def test_all_flags_state_can_omit_details_for_untracked_flags(): + future_time = (time.time() * 1000) + 100000 flag1 = { 'key': 'key1', 'version': 100, @@ -250,7 +252,7 @@ def test_all_flags_state_can_omit_details_for_untracked_flags(): 'on': False, 'offVariation': 1, 'variations': [ 'x', 'value3' ], - 'debugEventsUntilDate': 1000 + 'debugEventsUntilDate': future_time } store = InMemoryFeatureStore() store.init({ FEATURES: { 'key1': flag1, 'key2': flag2, 'key3': flag3 } }) @@ -275,7 +277,7 @@ def 
test_all_flags_state_can_omit_details_for_untracked_flags(): 'key3': { 'variation': 1, 'version': 300, - 'debugEventsUntilDate': 1000, + 'debugEventsUntilDate': future_time, 'reason': {'kind': 'OFF'} } }, From 1fc23e4e5a1e4d0a1f4256df5faf1c36bf85c4eb Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sun, 14 Oct 2018 00:25:44 -0700 Subject: [PATCH 003/190] use expiringdict from PyPi --- NOTICE.txt | 2 - ldclient/expiringdict.py | 155 -------------------------------- ldclient/redis_feature_store.py | 2 +- requirements.txt | 1 + 4 files changed, 2 insertions(+), 158 deletions(-) delete mode 100644 NOTICE.txt delete mode 100644 ldclient/expiringdict.py diff --git a/NOTICE.txt b/NOTICE.txt deleted file mode 100644 index 24f9d0e4..00000000 --- a/NOTICE.txt +++ /dev/null @@ -1,2 +0,0 @@ -This product includes software (ExpiringDict) developed by -Mailgun (https://github.com/mailgun/expiringdict). \ No newline at end of file diff --git a/ldclient/expiringdict.py b/ldclient/expiringdict.py deleted file mode 100644 index 4b244c21..00000000 --- a/ldclient/expiringdict.py +++ /dev/null @@ -1,155 +0,0 @@ -''' -Dictionary with auto-expiring values for caching purposes. - -Expiration happens on any access, object is locked during cleanup from expired -values. Can not store more than max_len elements - the oldest will be deleted. - ->>> ExpiringDict(max_len=100, max_age_seconds=10) - -The values stored in the following way: -{ - key1: (value1, created_time1), - key2: (value2, created_time2) -} - -NOTE: iteration over dict and also keys() do not remove expired values! - -Copied from https://github.com/mailgun/expiringdict/commit/d17d071721dd12af6829819885a74497492d7fb7 under the APLv2 - -TODO - Use PyPI version once https://github.com/mailgun/expiringdict/issues/13 has been fixed so that -https://github.com/mailgun/expiringdict/commit/62c50ce7083a1557a1140dae19145f3a0a7a1a14 is patched -''' - -import time -from threading import RLock - -from collections import OrderedDict - - -class ExpiringDict(OrderedDict): - - def __init__(self, max_len, max_age_seconds): - assert max_age_seconds >= 0 - assert max_len >= 1 - - OrderedDict.__init__(self) - self.max_len = max_len - self.max_age = max_age_seconds - self.lock = RLock() - - def __contains__(self, key): - """ Return True if the dict has a key, else return False. """ - try: - with self.lock: - item = OrderedDict.__getitem__(self, key) - if time.time() - item[1] < self.max_age: - return True - else: - del self[key] - except KeyError: - pass - return False - - def __getitem__(self, key, with_age=False): - """ Return the item of the dict. - - Raises a KeyError if key is not in the map. - """ - with self.lock: - item = OrderedDict.__getitem__(self, key) - item_age = time.time() - item[1] - if item_age < self.max_age: - if with_age: - return item[0], item_age - else: - return item[0] - else: - del self[key] - raise KeyError(key) - - def __setitem__(self, key, value): - """ Set d[key] to value. """ - with self.lock: - if len(self) == self.max_len: - self.popitem(last=False) - OrderedDict.__setitem__(self, key, (value, time.time())) - - def pop(self, key, default=None): - """ Get item from the dict and remove it. - - Return default if expired or does not exist. Never raise KeyError. - """ - with self.lock: - try: - item = OrderedDict.__getitem__(self, key) - del self[key] - return item[0] - except KeyError: - return default - - def ttl(self, key): - """ Return TTL of the `key` (in seconds). - - Returns None for non-existent or expired keys. 
- """ - key_value, key_age = self.get(key, with_age=True) - if key_age: - key_ttl = self.max_age - key_age - if key_ttl > 0: - return key_ttl - return None - - def get(self, key, default=None, with_age=False): - " Return the value for key if key is in the dictionary, else default. " - try: - return self.__getitem__(key, with_age) - except KeyError: - if with_age: - return default, None - else: - return default - - def items(self): - """ Return a copy of the dictionary's list of (key, value) pairs. """ - r = [] - for key in self: - try: - r.append((key, self[key])) - except KeyError: - pass - return r - - def values(self): - """ Return a copy of the dictionary's list of values. - See the note for dict.items(). """ - r = [] - for key in self: - try: - r.append(self[key]) - except KeyError: - pass - return r - - def fromkeys(self): - " Create a new dictionary with keys from seq and values set to value. " - raise NotImplementedError() - - def iteritems(self): - """ Return an iterator over the dictionary's (key, value) pairs. """ - raise NotImplementedError() - - def itervalues(self): - """ Return an iterator over the dictionary's values. """ - raise NotImplementedError() - - def viewitems(self): - " Return a new view of the dictionary's items ((key, value) pairs). " - raise NotImplementedError() - - def viewkeys(self): - """ Return a new view of the dictionary's keys. """ - raise NotImplementedError() - - def viewvalues(self): - """ Return a new view of the dictionary's values. """ - raise NotImplementedError() diff --git a/ldclient/redis_feature_store.py b/ldclient/redis_feature_store.py index b016a1eb..71b7261b 100644 --- a/ldclient/redis_feature_store.py +++ b/ldclient/redis_feature_store.py @@ -1,10 +1,10 @@ import json from pprint import pprint +from expiringdict import ExpiringDict import redis from ldclient import log -from ldclient.expiringdict import ExpiringDict from ldclient.interfaces import FeatureStore from ldclient.memoized_value import MemoizedValue from ldclient.versioned_data_kind import FEATURES diff --git a/requirements.txt b/requirements.txt index 90a5ef51..8787ac53 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,6 @@ backoff>=1.4.3 certifi>=2018.4.16 +expiringdict>=1.1.4 future>=0.16.0 six>=1.10.0 pyRFC3339>=1.0 From ae8b25eb33ad3dbca21231f22ece4a96694f731c Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 15:04:39 -0700 Subject: [PATCH 004/190] implement file data source, not including auto-update --- ldclient/file_data_source.py | 95 ++++++++++++++ test-requirements.txt | 3 +- testing/test_file_data_source.py | 205 +++++++++++++++++++++++++++++++ 3 files changed, 302 insertions(+), 1 deletion(-) create mode 100644 ldclient/file_data_source.py create mode 100644 testing/test_file_data_source.py diff --git a/ldclient/file_data_source.py b/ldclient/file_data_source.py new file mode 100644 index 00000000..d82930d9 --- /dev/null +++ b/ldclient/file_data_source.py @@ -0,0 +1,95 @@ +import json +import six +import traceback + +have_yaml = False +try: + import yaml + have_yaml = True +except ImportError: + pass + +from ldclient.interfaces import UpdateProcessor +from ldclient.util import log +from ldclient.versioned_data_kind import FEATURES, SEGMENTS + + +class FileDataSource(UpdateProcessor): + @classmethod + def factory(cls, **kwargs): + return lambda config, store, ready : FileDataSource(store, kwargs, ready) + + def __init__(self, store, options, ready): + self._store = store + self._ready = ready + self._inited = False + self._paths = 
options.get('paths', []) + if isinstance(self._paths, six.string_types): + self._paths = [ self._paths ] + + def start(self): + self._load_all() + + # We will signal readiness immediately regardless of whether the file load succeeded or failed - + # the difference can be detected by checking initialized() + self._ready.set() + + def stop(self): + pass + + def initialized(self): + return self._inited + + def _load_all(self): + all_data = { FEATURES: {}, SEGMENTS: {} } + print "Loading: %s" % self._paths + for path in self._paths: + try: + self._load_file(path, all_data) + except Exception as e: + log.error('Unable to load flag data from "%s": %s' % (path, repr(e))) + traceback.print_exc() + return + print "Initing: %s" % all_data + self._store.init(all_data) + self._inited = True + + def _load_file(self, path, all_data): + content = None + with open(path, 'r') as f: + content = f.read() + parsed = self._parse_content(content) + for key, flag in six.iteritems(parsed.get('flags', {})): + self._add_item(all_data, FEATURES, flag) + for key, value in six.iteritems(parsed.get('flagValues', {})): + self._add_item(all_data, FEATURES, self._make_flag_with_value(key, value)) + for key, segment in six.iteritems(parsed.get('segments', {})): + self._add_item(all_data, SEGMENTS, segment) + + def _parse_content(self, content): + if have_yaml: + if content.strip().startswith("{"): + print("json: %s" % content) + return json.loads(content) + else: + return yaml.load(content) + print("json: %s" % content) + return json.loads(content) + + def _add_item(self, all_data, kind, item): + items = all_data[kind] + key = item.get('key') + if items.get(key) is None: + items[key] = item + else: + raise Exception('In %s, key "%s" was used more than once' % (kind.namespace, key)) + + def _make_flag_with_value(self, key, value): + return { + 'key': key, + 'on': True, + 'fallthrough': { + 'variation': 0 + }, + 'variations': [ value ] + } diff --git a/test-requirements.txt b/test-requirements.txt index ee547312..1aa5903e 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -4,4 +4,5 @@ redis>=2.10.5 coverage>=4.4 pytest-capturelog>=0.7 pytest-cov>=2.4.0 -codeclimate-test-reporter>=0.2.1 \ No newline at end of file +codeclimate-test-reporter>=0.2.1 +pyyaml>=3.0 diff --git a/testing/test_file_data_source.py b/testing/test_file_data_source.py new file mode 100644 index 00000000..de4d9393 --- /dev/null +++ b/testing/test_file_data_source.py @@ -0,0 +1,205 @@ +import json +import os +import pytest +import tempfile +import threading +import time + +from ldclient.client import LDClient +from ldclient.config import Config +from ldclient.feature_store import InMemoryFeatureStore +from ldclient.file_data_source import FileDataSource +from ldclient.versioned_data_kind import FEATURES, SEGMENTS + + +all_flag_keys = [ 'flag1', 'flag2' ] +all_properties_json = ''' + { + "flags": { + "flag1": { + "key": "flag1", + "on": true, + "fallthrough": { + "variation": 2 + }, + "variations": [ "fall", "off", "on" ] + } + }, + "flagValues": { + "flag2": "value2" + }, + "segments": { + "seg1": { + "key": "seg1", + "include": ["user1"] + } + } + } +''' + +all_properties_yaml = ''' +--- +flags: + flag1: + key: flag1 + "on": true +flagValues: + flag2: value2 +segments: + seg1: + key: seg1 + include: ["user1"] +''' + +flag_only_json = ''' + { + "flags": { + "flag1": { + "key": "flag1", + "on": true, + "fallthrough": { + "variation": 2 + }, + "variations": [ "fall", "off", "on" ] + } + } + } +''' + +segment_only_json = ''' + { + "segments": { 
+ "seg1": { + "key": "seg1", + "include": ["user1"] + } + } + } +''' + +fds = None +store = None +ready = None + + +def setup_function(): + global fds, store, ready + store = InMemoryFeatureStore() + ready = threading.Event() + +def teardown_function(): + if fds is not None: + fds.stop() + +def make_temp_file(content): + f, path = tempfile.mkstemp() + os.write(f, content) + os.close(f) + return path + +def replace_file(path, content): + with open(path, 'w') as f: + f.write(content) + +def test_does_not_load_data_prior_to_start(): + path = make_temp_file('{"flagValues":{"key":"value"}}') + try: + fds = FileDataSource.factory(paths = path)(Config(), store, ready) + assert ready.is_set() is False + assert fds.initialized() is False + assert store.initialized is False + finally: + os.remove(path) + +def test_loads_flags_on_start_from_json(): + path = make_temp_file(all_properties_json) + try: + fds = FileDataSource.factory(paths = path)(Config(), store, ready) + fds.start() + assert store.initialized is True + assert sorted(list(store.all(FEATURES, lambda x: x).keys())) == all_flag_keys + finally: + os.remove(path) + +def test_loads_flags_on_start_from_yaml(): + path = make_temp_file(all_properties_yaml) + try: + fds = FileDataSource.factory(paths = path)(Config(), store, ready) + fds.start() + assert store.initialized is True + assert sorted(list(store.all(FEATURES, lambda x: x).keys())) == all_flag_keys + finally: + os.remove(path) + +def test_sets_ready_event_and_initialized_on_successful_load(): + path = make_temp_file(all_properties_json) + try: + fds = FileDataSource.factory(paths = path)(Config(), store, ready) + fds.start() + assert fds.initialized() is True + assert ready.is_set() is True + finally: + os.remove(path) + +def test_sets_ready_event_and_does_not_set_initialized_on_unsuccessful_load(): + bad_file_path = 'no-such-file' + fds = FileDataSource.factory(paths = bad_file_path)(Config(), store, ready) + fds.start() + assert fds.initialized() is False + assert ready.is_set() is True + +def test_can_load_multiple_files(): + path1 = make_temp_file(flag_only_json) + path2 = make_temp_file(segment_only_json) + try: + fds = FileDataSource.factory(paths = [ path1, path2 ])(Config(), store, ready) + fds.start() + assert len(store.all(FEATURES, lambda x: x)) == 1 + assert len(store.all(SEGMENTS, lambda x: x)) == 1 + finally: + os.remove(path1) + os.remove(path2) + +def test_does_not_allow_duplicate_keys(): + path1 = make_temp_file(flag_only_json) + path2 = make_temp_file(flag_only_json) + try: + fds = FileDataSource.factory(paths = [ path1, path2 ])(Config(), store, ready) + fds.start() + assert len(store.all(FEATURES, lambda x: x)) == 0 + finally: + os.remove(path1) + os.remove(path2) + +def test_does_not_reload_modified_file_if_auto_update_is_off(): + path = make_temp_file(flag_only_json) + try: + fds = FileDataSource.factory(paths = path)(Config(), store, ready) + fds.start() + assert len(store.all(SEGMENTS, lambda x: x)) == 0 + time.sleep(0.5) + replace_file(path, segment_only_json) + time.sleep(0.5) + assert len(store.all(SEGMENTS, lambda x: x)) == 0 + finally: + os.remove(path) + +def test_evaluates_full_flag_with_client_as_expected(): + path = make_temp_file(all_properties_json) + try: + fds = FileDataSource.factory(paths = path) + client = LDClient(config=Config(update_processor_class = fds, send_events = False)) + value = client.variation('flag1', { 'key': 'user' }, '') + assert value == 'on' + finally: + os.remove(path) + +def 
test_evaluates_simplified_flag_with_client_as_expected(): + path = make_temp_file(all_properties_json) + try: + fds = FileDataSource.factory(paths = path) + client = LDClient(config=Config(update_processor_class = fds, send_events = False)) + value = client.variation('flag2', { 'key': 'user' }, '') + assert value == 'value2' + finally: + os.remove(path) From 850837d72794b6e5e175590304fe844b808213d8 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 15:53:56 -0700 Subject: [PATCH 005/190] rm debugging --- ldclient/file_data_source.py | 1 - 1 file changed, 1 deletion(-) diff --git a/ldclient/file_data_source.py b/ldclient/file_data_source.py index d82930d9..5ebb062d 100644 --- a/ldclient/file_data_source.py +++ b/ldclient/file_data_source.py @@ -42,7 +42,6 @@ def initialized(self): def _load_all(self): all_data = { FEATURES: {}, SEGMENTS: {} } - print "Loading: %s" % self._paths for path in self._paths: try: self._load_file(path, all_data) From aa7684a5181143a0d7c0874c3d01f1b837f4c3b2 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 16:01:26 -0700 Subject: [PATCH 006/190] rm debugging --- ldclient/file_data_source.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/ldclient/file_data_source.py b/ldclient/file_data_source.py index 5ebb062d..a8351ba6 100644 --- a/ldclient/file_data_source.py +++ b/ldclient/file_data_source.py @@ -49,7 +49,6 @@ def _load_all(self): log.error('Unable to load flag data from "%s": %s' % (path, repr(e))) traceback.print_exc() return - print "Initing: %s" % all_data self._store.init(all_data) self._inited = True @@ -68,11 +67,9 @@ def _load_file(self, path, all_data): def _parse_content(self, content): if have_yaml: if content.strip().startswith("{"): - print("json: %s" % content) return json.loads(content) else: return yaml.load(content) - print("json: %s" % content) return json.loads(content) def _add_item(self, all_data, kind, item): From 39c90424302e934502db4cff01e9f9de96cd2e65 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 16:30:34 -0700 Subject: [PATCH 007/190] Python 3 compatibility fix --- testing/test_file_data_source.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/testing/test_file_data_source.py b/testing/test_file_data_source.py index de4d9393..9b4a2c7b 100644 --- a/testing/test_file_data_source.py +++ b/testing/test_file_data_source.py @@ -1,6 +1,7 @@ import json import os import pytest +import six import tempfile import threading import time @@ -93,7 +94,7 @@ def teardown_function(): def make_temp_file(content): f, path = tempfile.mkstemp() - os.write(f, content) + os.write(f, six.b(content)) os.close(f) return path From a43bf0c56789f26f80199e260ca04c4b9cb6b918 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 13:14:55 -0700 Subject: [PATCH 008/190] add file watching, update documentation and tests --- ldclient/file_data_source.py | 194 ++++++++++++++++++++++++++++++- test-requirements.txt | 1 + testing/test_file_data_source.py | 76 +++++++----- 3 files changed, 239 insertions(+), 32 deletions(-) diff --git a/ldclient/file_data_source.py b/ldclient/file_data_source.py index a8351ba6..09afa263 100644 --- a/ldclient/file_data_source.py +++ b/ldclient/file_data_source.py @@ -1,4 +1,5 @@ import json +import os import six import traceback @@ -9,7 +10,17 @@ except ImportError: pass +have_watchdog = False +try: + import watchdog + import watchdog.events + import watchdog.observers + have_watchdog = True +except ImportError: + pass + from ldclient.interfaces import 
UpdateProcessor +from ldclient.repeating_timer import RepeatingTimer from ldclient.util import log from ldclient.versioned_data_kind import FEATURES, SEGMENTS @@ -17,6 +28,101 @@ class FileDataSource(UpdateProcessor): @classmethod def factory(cls, **kwargs): + """Provides a way to use local files as a source of feature flag state. This would typically be + used in a test environment, to operate using a predetermined feature flag state without an + actual LaunchDarkly connection. + + To use this component, call `FileDataSource.factory`, and store its return value in the + `update_processor_class` property of your LaunchDarkly client configuration. In the options + to `factory`, set `paths` to the file path(s) of your data file(s): + :: + + factory = FileDataSource.factory(paths: [ myFilePath ]) + config = Config(update_processor_class = factory) + + This will cause the client not to connect to LaunchDarkly to get feature flags. The + client may still make network connections to send analytics events, unless you have disabled + this with Config.send_events or Config.offline. + + Flag data files can be either JSON or YAML (in order to use YAML, you must install the 'pyyaml' + package). They contain an object with three possible properties: + + * "flags": Feature flag definitions. + * "flagValues": Simplified feature flags that contain only a value. + * "segments": User segment definitions. + + The format of the data in "flags" and "segments" is defined by the LaunchDarkly application + and is subject to change. Rather than trying to construct these objects yourself, it is simpler + to request existing flags directly from the LaunchDarkly server in JSON format, and use this + output as the starting point for your file. In Linux you would do this: + :: + + curl -H "Authorization: {your sdk key}" https://app.launchdarkly.com/sdk/latest-all + + The output will look something like this (but with many more properties): + :: + + { + "flags": { + "flag-key-1": { + "key": "flag-key-1", + "on": true, + "variations": [ "a", "b" ] + } + }, + "segments": { + "segment-key-1": { + "key": "segment-key-1", + "includes": [ "user-key-1" ] + } + } + } + + Data in this format allows the SDK to exactly duplicate all the kinds of flag behavior supported + by LaunchDarkly. However, in many cases you will not need this complexity, but will just want to + set specific flag keys to specific values. For that, you can use a much simpler format: + :: + + { + "flagValues": { + "my-string-flag-key": "value-1", + "my-boolean-flag-key": true, + "my-integer-flag-key": 3 + } + } + + Or, in YAML: + :: + + flagValues: + my-string-flag-key: "value-1" + my-boolean-flag-key: true + my-integer-flag-key: 1 + + It is also possible to specify both "flags" and "flagValues", if you want some flags + to have simple values and others to have complex behavior. However, it is an error to use the + same flag key or segment key more than once, either in a single file or across multiple files. + + If the data source encounters any error in any file-- malformed content, a missing file, or a + duplicate key-- it will not load flags from any of the files. + + :param kwargs: + See below + + :Keyword arguments: + * **paths** (array): The paths of the source files for loading flag data. These may be absolute paths + or relative to the current working directory. Files will be parsed as JSON unless the 'pyyaml' + package is installed, in which case YAML is also allowed. 
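+        As an illustration (the file names here are hypothetical examples, not SDK
+        defaults), a data source combining one JSON file and one YAML file could be
+        configured as:
+        ::
+
+            factory = FileDataSource.factory(paths = [ 'flags.json', 'segments.yaml' ])
+            config = Config(update_processor_class = factory, send_events = False)
+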
+ * **auto_update** (boolean): True if the data source should watch for changes to the source file(s) + and reload flags whenever there is a change. The default implementation of this feature is based on + polling the filesystem, which may not perform well; if you install the 'watchdog' package (not + included by default, to avoid adding unwanted dependencies to the SDK), its native file watching + mechanism will be used instead. Note that auto-updating will only work if all of the files you + specified have valid directory paths at startup time. + * **poll_interval** (float): The minimum interval, in seconds, between checks for file modifications - + used only if auto_update is true, and if the native file-watching mechanism from 'watchdog' is not + being used. The default value is 1 second. + """ return lambda config, store, ready : FileDataSource(store, kwargs, ready) def __init__(self, store, options, ready): @@ -26,16 +132,25 @@ def __init__(self, store, options, ready): self._paths = options.get('paths', []) if isinstance(self._paths, six.string_types): self._paths = [ self._paths ] - + self._auto_update = options.get('auto_update', False) + self._poll_interval = options.get('poll_interval', 1) + self._force_polling = options.get('force_polling', False) # used only in tests + def start(self): self._load_all() + if self._auto_update: + self._auto_updater = self._start_auto_updater() + else: + self._auto_updater = None + # We will signal readiness immediately regardless of whether the file load succeeded or failed - # the difference can be detected by checking initialized() self._ready.set() def stop(self): - pass + if self._auto_updater: + self._auto_updater.stop() def initialized(self): return self._inited @@ -66,10 +181,7 @@ def _load_file(self, path, all_data): def _parse_content(self, content): if have_yaml: - if content.strip().startswith("{"): - return json.loads(content) - else: - return yaml.load(content) + return yaml.load(content) # pyyaml correctly parses JSON too return json.loads(content) def _add_item(self, all_data, kind, item): @@ -89,3 +201,73 @@ def _make_flag_with_value(self, key, value): }, 'variations': [ value ] } + + def _start_auto_updater(self): + resolved_paths = [] + for path in self._paths: + try: + resolved_paths.append(os.path.realpath(path)) + except: + log.warn('Cannot watch for changes to data file "%s" because it is an invalid path' % path) + if have_watchdog and not self._force_polling: + return FileDataSource.WatchdogAutoUpdater(resolved_paths, self._load_all) + else: + return FileDataSource.PollingAutoUpdater(resolved_paths, self._load_all, self._poll_interval) + + # Watch for changes to data files using the watchdog package. This uses native OS filesystem notifications + # if available for the current platform. + class WatchdogAutoUpdater(object): + def __init__(self, resolved_paths, reloader): + watched_files = set(resolved_paths) + + class LDWatchdogHandler(watchdog.events.FileSystemEventHandler): + def on_any_event(self, event): + if event.src_path in watched_files: + reloader() + + dir_paths = set() + for path in resolved_paths: + dir_paths.add(os.path.dirname(path)) + + self._observer = watchdog.observers.Observer() + handler = LDWatchdogHandler() + for path in dir_paths: + self._observer.schedule(handler, path) + self._observer.start() + + def stop(self): + self._observer.stop() + self._observer.join() + + # Watch for changes to data files by polling their modification times. 
This is used if auto-update is + # on but the watchdog package is not installed. + class PollingAutoUpdater(object): + def __init__(self, resolved_paths, reloader, interval): + self._paths = resolved_paths + self._reloader = reloader + self._file_times = self._check_file_times() + self._timer = RepeatingTimer(interval, self._poll) + self._timer.start() + + def stop(self): + self._timer.stop() + + def _poll(self): + new_times = self._check_file_times() + changed = False + for file_path, file_time in six.iteritems(self._file_times): + if new_times.get(file_path) is not None and new_times.get(file_path) != file_time: + changed = True + break + self._file_times = new_times + if changed: + self._reloader() + + def _check_file_times(self): + ret = {} + for path in self._paths: + try: + ret[path] = os.path.getmtime(path) + except: + ret[path] = None + return ret diff --git a/test-requirements.txt b/test-requirements.txt index 1aa5903e..413ef355 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -6,3 +6,4 @@ pytest-capturelog>=0.7 pytest-cov>=2.4.0 codeclimate-test-reporter>=0.2.1 pyyaml>=3.0 +watchdog>=0.9 diff --git a/testing/test_file_data_source.py b/testing/test_file_data_source.py index 9b4a2c7b..4fa16bff 100644 --- a/testing/test_file_data_source.py +++ b/testing/test_file_data_source.py @@ -78,19 +78,19 @@ } ''' -fds = None +data_source = None store = None ready = None def setup_function(): - global fds, store, ready + global data_source, store, ready store = InMemoryFeatureStore() ready = threading.Event() def teardown_function(): - if fds is not None: - fds.stop() + if data_source is not None: + data_source.stop() def make_temp_file(content): f, path = tempfile.mkstemp() @@ -105,9 +105,9 @@ def replace_file(path, content): def test_does_not_load_data_prior_to_start(): path = make_temp_file('{"flagValues":{"key":"value"}}') try: - fds = FileDataSource.factory(paths = path)(Config(), store, ready) + data_source = FileDataSource.factory(paths = path)(Config(), store, ready) assert ready.is_set() is False - assert fds.initialized() is False + assert data_source.initialized() is False assert store.initialized is False finally: os.remove(path) @@ -115,8 +115,8 @@ def test_does_not_load_data_prior_to_start(): def test_loads_flags_on_start_from_json(): path = make_temp_file(all_properties_json) try: - fds = FileDataSource.factory(paths = path)(Config(), store, ready) - fds.start() + data_source = FileDataSource.factory(paths = path)(Config(), store, ready) + data_source.start() assert store.initialized is True assert sorted(list(store.all(FEATURES, lambda x: x).keys())) == all_flag_keys finally: @@ -125,8 +125,8 @@ def test_loads_flags_on_start_from_json(): def test_loads_flags_on_start_from_yaml(): path = make_temp_file(all_properties_yaml) try: - fds = FileDataSource.factory(paths = path)(Config(), store, ready) - fds.start() + data_source = FileDataSource.factory(paths = path)(Config(), store, ready) + data_source.start() assert store.initialized is True assert sorted(list(store.all(FEATURES, lambda x: x).keys())) == all_flag_keys finally: @@ -135,26 +135,26 @@ def test_loads_flags_on_start_from_yaml(): def test_sets_ready_event_and_initialized_on_successful_load(): path = make_temp_file(all_properties_json) try: - fds = FileDataSource.factory(paths = path)(Config(), store, ready) - fds.start() - assert fds.initialized() is True + data_source = FileDataSource.factory(paths = path)(Config(), store, ready) + data_source.start() + assert data_source.initialized() is True assert 
ready.is_set() is True finally: os.remove(path) def test_sets_ready_event_and_does_not_set_initialized_on_unsuccessful_load(): bad_file_path = 'no-such-file' - fds = FileDataSource.factory(paths = bad_file_path)(Config(), store, ready) - fds.start() - assert fds.initialized() is False + data_source = FileDataSource.factory(paths = bad_file_path)(Config(), store, ready) + data_source.start() + assert data_source.initialized() is False assert ready.is_set() is True def test_can_load_multiple_files(): path1 = make_temp_file(flag_only_json) path2 = make_temp_file(segment_only_json) try: - fds = FileDataSource.factory(paths = [ path1, path2 ])(Config(), store, ready) - fds.start() + data_source = FileDataSource.factory(paths = [ path1, path2 ])(Config(), store, ready) + data_source.start() assert len(store.all(FEATURES, lambda x: x)) == 1 assert len(store.all(SEGMENTS, lambda x: x)) == 1 finally: @@ -165,8 +165,8 @@ def test_does_not_allow_duplicate_keys(): path1 = make_temp_file(flag_only_json) path2 = make_temp_file(flag_only_json) try: - fds = FileDataSource.factory(paths = [ path1, path2 ])(Config(), store, ready) - fds.start() + data_source = FileDataSource.factory(paths = [ path1, path2 ])(Config(), store, ready) + data_source.start() assert len(store.all(FEATURES, lambda x: x)) == 0 finally: os.remove(path1) @@ -175,8 +175,8 @@ def test_does_not_allow_duplicate_keys(): def test_does_not_reload_modified_file_if_auto_update_is_off(): path = make_temp_file(flag_only_json) try: - fds = FileDataSource.factory(paths = path)(Config(), store, ready) - fds.start() + data_source = FileDataSource.factory(paths = path)(Config(), store, ready) + data_source.start() assert len(store.all(SEGMENTS, lambda x: x)) == 0 time.sleep(0.5) replace_file(path, segment_only_json) @@ -185,22 +185,46 @@ def test_does_not_reload_modified_file_if_auto_update_is_off(): finally: os.remove(path) +def do_auto_update_test(options): + path = make_temp_file(flag_only_json) + options['paths'] = path + try: + data_source = FileDataSource.factory(**options)(Config(), store, ready) + data_source.start() + assert len(store.all(SEGMENTS, lambda x: x)) == 0 + time.sleep(0.5) + replace_file(path, segment_only_json) + time.sleep(0.5) + assert len(store.all(SEGMENTS, lambda x: x)) == 1 + finally: + os.remove(path) + +def test_reloads_modified_file_if_auto_update_is_on(): + do_auto_update_test({ 'auto_update': True }) + +def test_reloads_modified_file_in_polling_mode(): + do_auto_update_test({ 'auto_update': True, 'force_polling': True, 'poll_interval': 0.1 }) + def test_evaluates_full_flag_with_client_as_expected(): path = make_temp_file(all_properties_json) try: - fds = FileDataSource.factory(paths = path) - client = LDClient(config=Config(update_processor_class = fds, send_events = False)) + data_source = FileDataSource.factory(paths = path) + client = LDClient(config=Config(update_processor_class = data_source, send_events = False)) value = client.variation('flag1', { 'key': 'user' }, '') assert value == 'on' finally: os.remove(path) + if client is not None: + client.close() def test_evaluates_simplified_flag_with_client_as_expected(): path = make_temp_file(all_properties_json) try: - fds = FileDataSource.factory(paths = path) - client = LDClient(config=Config(update_processor_class = fds, send_events = False)) + data_source = FileDataSource.factory(paths = path) + client = LDClient(config=Config(update_processor_class = data_source, send_events = False)) value = client.variation('flag2', { 'key': 'user' }, '') assert value == 
'value2' finally: os.remove(path) + if client is not None: + client.close() From 2cea73061eaba3d4d7ac812e9fbf9fffb7de5712 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 13:19:29 -0700 Subject: [PATCH 009/190] readme --- README.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.md b/README.md index 57aac968..edef13e6 100644 --- a/README.md +++ b/README.md @@ -78,6 +78,10 @@ Supported Python versions ---------- The SDK is tested with the most recent patch releases of Python 2.7, 3.3, 3.4, 3.5, and 3.6. Python 2.6 is no longer supported. +Using flag data from a file +--------------------------- +For testing purposes, the SDK can be made to read feature flag state from a file or files instead of connecting to LaunchDarkly. See [`file_data_source.py`](https://github.com/launchdarkly/python-client/blob/master/ldclient/file_data_source.py) for more details. + Learn more ----------- From dcf1afe6f7f1fd1535450a36fd26af18afd5c6af Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 13:20:42 -0700 Subject: [PATCH 010/190] debugging --- ldclient/file_data_source.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/ldclient/file_data_source.py b/ldclient/file_data_source.py index 09afa263..79d9655f 100644 --- a/ldclient/file_data_source.py +++ b/ldclient/file_data_source.py @@ -218,10 +218,12 @@ def _start_auto_updater(self): # if available for the current platform. class WatchdogAutoUpdater(object): def __init__(self, resolved_paths, reloader): + print("*** all paths: %s" % resolved_paths) watched_files = set(resolved_paths) class LDWatchdogHandler(watchdog.events.FileSystemEventHandler): def on_any_event(self, event): + print("*** got event: %s" % event.src_path) if event.src_path in watched_files: reloader() @@ -232,6 +234,7 @@ def on_any_event(self, event): self._observer = watchdog.observers.Observer() handler = LDWatchdogHandler() for path in dir_paths: + print("*** watching: %s" % path) self._observer.schedule(handler, path) self._observer.start() From 4e98fdd3f3c0e0ecfbf608643ad17268c92925fa Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 13:35:03 -0700 Subject: [PATCH 011/190] debugging --- testing/test_file_data_source.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/testing/test_file_data_source.py b/testing/test_file_data_source.py index 4fa16bff..7e565c17 100644 --- a/testing/test_file_data_source.py +++ b/testing/test_file_data_source.py @@ -194,8 +194,14 @@ def do_auto_update_test(options): assert len(store.all(SEGMENTS, lambda x: x)) == 0 time.sleep(0.5) replace_file(path, segment_only_json) - time.sleep(0.5) - assert len(store.all(SEGMENTS, lambda x: x)) == 1 + print("*** modified file %s" % path) + deadline = time.time() + 10 + while time.time() < deadline: + time.sleep(0.1) + if len(store.all(SEGMENTS, lambda x: x)) == 1: + return + print("*** checked") + assert False, "Flags were not reloaded after 10 seconds" finally: os.remove(path) From 8f3c2217805da177d412a6a5543982ad3e118ca6 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 13:41:52 -0700 Subject: [PATCH 012/190] debugging --- ldclient/file_data_source.py | 2 ++ testing/test_file_data_source.py | 5 ++++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/ldclient/file_data_source.py b/ldclient/file_data_source.py index 79d9655f..c1be6974 100644 --- a/ldclient/file_data_source.py +++ b/ldclient/file_data_source.py @@ -239,6 +239,7 @@ def on_any_event(self, event): self._observer.start() def stop(self): + print("*** 
stopping observer") self._observer.stop() self._observer.join() @@ -253,6 +254,7 @@ def __init__(self, resolved_paths, reloader, interval): self._timer.start() def stop(self): + print("*** stopping polling") self._timer.stop() def _poll(self): diff --git a/testing/test_file_data_source.py b/testing/test_file_data_source.py index 7e565c17..e62fff62 100644 --- a/testing/test_file_data_source.py +++ b/testing/test_file_data_source.py @@ -199,16 +199,19 @@ def do_auto_update_test(options): while time.time() < deadline: time.sleep(0.1) if len(store.all(SEGMENTS, lambda x: x)) == 1: + print("*** success on %s" % path) return - print("*** checked") + print("*** checked %s" % path) assert False, "Flags were not reloaded after 10 seconds" finally: os.remove(path) def test_reloads_modified_file_if_auto_update_is_on(): + print("*** with watchdog") do_auto_update_test({ 'auto_update': True }) def test_reloads_modified_file_in_polling_mode(): + print("*** with polling") do_auto_update_test({ 'auto_update': True, 'force_polling': True, 'poll_interval': 0.1 }) def test_evaluates_full_flag_with_client_as_expected(): From 84276ddc908a1de2fae9922aaaf538d5eac560a1 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 13:48:26 -0700 Subject: [PATCH 013/190] fix cleanup logic --- ldclient/file_data_source.py | 3 +- testing/test_file_data_source.py | 53 +++++++++++++++++--------------- 2 files changed, 30 insertions(+), 26 deletions(-) diff --git a/ldclient/file_data_source.py b/ldclient/file_data_source.py index c1be6974..0b51cfdd 100644 --- a/ldclient/file_data_source.py +++ b/ldclient/file_data_source.py @@ -133,6 +133,7 @@ def __init__(self, store, options, ready): if isinstance(self._paths, six.string_types): self._paths = [ self._paths ] self._auto_update = options.get('auto_update', False) + self._auto_updater = None self._poll_interval = options.get('poll_interval', 1) self._force_polling = options.get('force_polling', False) # used only in tests @@ -141,8 +142,6 @@ def start(self): if self._auto_update: self._auto_updater = self._start_auto_updater() - else: - self._auto_updater = None # We will signal readiness immediately regardless of whether the file load succeeded or failed - # the difference can be detected by checking initialized() diff --git a/testing/test_file_data_source.py b/testing/test_file_data_source.py index e62fff62..8a8f5d5a 100644 --- a/testing/test_file_data_source.py +++ b/testing/test_file_data_source.py @@ -92,6 +92,11 @@ def teardown_function(): if data_source is not None: data_source.stop() +def make_data_source(**kwargs): + global data_source + data_source = FileDataSource.factory(**kwargs)(Config(), store, ready) + return data_source + def make_temp_file(content): f, path = tempfile.mkstemp() os.write(f, six.b(content)) @@ -105,9 +110,9 @@ def replace_file(path, content): def test_does_not_load_data_prior_to_start(): path = make_temp_file('{"flagValues":{"key":"value"}}') try: - data_source = FileDataSource.factory(paths = path)(Config(), store, ready) + source = make_data_source(paths = path) assert ready.is_set() is False - assert data_source.initialized() is False + assert source.initialized() is False assert store.initialized is False finally: os.remove(path) @@ -115,8 +120,8 @@ def test_does_not_load_data_prior_to_start(): def test_loads_flags_on_start_from_json(): path = make_temp_file(all_properties_json) try: - data_source = FileDataSource.factory(paths = path)(Config(), store, ready) - data_source.start() + source = make_data_source(paths = path) + 
source.start() assert store.initialized is True assert sorted(list(store.all(FEATURES, lambda x: x).keys())) == all_flag_keys finally: @@ -125,8 +130,8 @@ def test_loads_flags_on_start_from_json(): def test_loads_flags_on_start_from_yaml(): path = make_temp_file(all_properties_yaml) try: - data_source = FileDataSource.factory(paths = path)(Config(), store, ready) - data_source.start() + source = make_data_source(paths = path) + source.start() assert store.initialized is True assert sorted(list(store.all(FEATURES, lambda x: x).keys())) == all_flag_keys finally: @@ -135,26 +140,26 @@ def test_loads_flags_on_start_from_yaml(): def test_sets_ready_event_and_initialized_on_successful_load(): path = make_temp_file(all_properties_json) try: - data_source = FileDataSource.factory(paths = path)(Config(), store, ready) - data_source.start() - assert data_source.initialized() is True + source = make_data_source(paths = path) + source.start() + assert source.initialized() is True assert ready.is_set() is True finally: os.remove(path) def test_sets_ready_event_and_does_not_set_initialized_on_unsuccessful_load(): bad_file_path = 'no-such-file' - data_source = FileDataSource.factory(paths = bad_file_path)(Config(), store, ready) - data_source.start() - assert data_source.initialized() is False + source = make_data_source(paths = bad_file_path) + source.start() + assert source.initialized() is False assert ready.is_set() is True def test_can_load_multiple_files(): path1 = make_temp_file(flag_only_json) path2 = make_temp_file(segment_only_json) try: - data_source = FileDataSource.factory(paths = [ path1, path2 ])(Config(), store, ready) - data_source.start() + source = make_data_source(paths = [ path1, path2 ]) + source.start() assert len(store.all(FEATURES, lambda x: x)) == 1 assert len(store.all(SEGMENTS, lambda x: x)) == 1 finally: @@ -165,8 +170,8 @@ def test_does_not_allow_duplicate_keys(): path1 = make_temp_file(flag_only_json) path2 = make_temp_file(flag_only_json) try: - data_source = FileDataSource.factory(paths = [ path1, path2 ])(Config(), store, ready) - data_source.start() + source = make_data_source(paths = [ path1, path2 ]) + source.start() assert len(store.all(FEATURES, lambda x: x)) == 0 finally: os.remove(path1) @@ -175,8 +180,8 @@ def test_does_not_allow_duplicate_keys(): def test_does_not_reload_modified_file_if_auto_update_is_off(): path = make_temp_file(flag_only_json) try: - data_source = FileDataSource.factory(paths = path)(Config(), store, ready) - data_source.start() + source = make_data_source(paths = path) + source.start() assert len(store.all(SEGMENTS, lambda x: x)) == 0 time.sleep(0.5) replace_file(path, segment_only_json) @@ -189,8 +194,8 @@ def do_auto_update_test(options): path = make_temp_file(flag_only_json) options['paths'] = path try: - data_source = FileDataSource.factory(**options)(Config(), store, ready) - data_source.start() + source = make_data_source(**options) + source.start() assert len(store.all(SEGMENTS, lambda x: x)) == 0 time.sleep(0.5) replace_file(path, segment_only_json) @@ -217,8 +222,8 @@ def test_reloads_modified_file_in_polling_mode(): def test_evaluates_full_flag_with_client_as_expected(): path = make_temp_file(all_properties_json) try: - data_source = FileDataSource.factory(paths = path) - client = LDClient(config=Config(update_processor_class = data_source, send_events = False)) + factory = FileDataSource.factory(paths = path) + client = LDClient(config=Config(update_processor_class = factory, send_events = False)) value = client.variation('flag1', 
{ 'key': 'user' }, '') assert value == 'on' finally: @@ -229,8 +234,8 @@ def test_evaluates_full_flag_with_client_as_expected(): def test_evaluates_simplified_flag_with_client_as_expected(): path = make_temp_file(all_properties_json) try: - data_source = FileDataSource.factory(paths = path) - client = LDClient(config=Config(update_processor_class = data_source, send_events = False)) + factory = FileDataSource.factory(paths = path) + client = LDClient(config=Config(update_processor_class = factory, send_events = False)) value = client.variation('flag2', { 'key': 'user' }, '') assert value == 'value2' finally: From 2a822e6e82a1e8dffcdfd59d183d43219dff391c Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 13:50:31 -0700 Subject: [PATCH 014/190] rm debugging --- ldclient/file_data_source.py | 5 ----- testing/test_file_data_source.py | 5 ----- 2 files changed, 10 deletions(-) diff --git a/ldclient/file_data_source.py b/ldclient/file_data_source.py index 0b51cfdd..c4013a52 100644 --- a/ldclient/file_data_source.py +++ b/ldclient/file_data_source.py @@ -217,12 +217,10 @@ def _start_auto_updater(self): # if available for the current platform. class WatchdogAutoUpdater(object): def __init__(self, resolved_paths, reloader): - print("*** all paths: %s" % resolved_paths) watched_files = set(resolved_paths) class LDWatchdogHandler(watchdog.events.FileSystemEventHandler): def on_any_event(self, event): - print("*** got event: %s" % event.src_path) if event.src_path in watched_files: reloader() @@ -233,12 +231,10 @@ def on_any_event(self, event): self._observer = watchdog.observers.Observer() handler = LDWatchdogHandler() for path in dir_paths: - print("*** watching: %s" % path) self._observer.schedule(handler, path) self._observer.start() def stop(self): - print("*** stopping observer") self._observer.stop() self._observer.join() @@ -253,7 +249,6 @@ def __init__(self, resolved_paths, reloader, interval): self._timer.start() def stop(self): - print("*** stopping polling") self._timer.stop() def _poll(self): diff --git a/testing/test_file_data_source.py b/testing/test_file_data_source.py index 8a8f5d5a..68d1e5b7 100644 --- a/testing/test_file_data_source.py +++ b/testing/test_file_data_source.py @@ -199,24 +199,19 @@ def do_auto_update_test(options): assert len(store.all(SEGMENTS, lambda x: x)) == 0 time.sleep(0.5) replace_file(path, segment_only_json) - print("*** modified file %s" % path) deadline = time.time() + 10 while time.time() < deadline: time.sleep(0.1) if len(store.all(SEGMENTS, lambda x: x)) == 1: - print("*** success on %s" % path) return - print("*** checked %s" % path) assert False, "Flags were not reloaded after 10 seconds" finally: os.remove(path) def test_reloads_modified_file_if_auto_update_is_on(): - print("*** with watchdog") do_auto_update_test({ 'auto_update': True }) def test_reloads_modified_file_in_polling_mode(): - print("*** with polling") do_auto_update_test({ 'auto_update': True, 'force_polling': True, 'poll_interval': 0.1 }) def test_evaluates_full_flag_with_client_as_expected(): From ac5e8de65036434ee0be93dee64c7179a9200b50 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 13 Nov 2018 20:39:44 -0800 Subject: [PATCH 015/190] typo in comment --- ldclient/file_data_source.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ldclient/file_data_source.py b/ldclient/file_data_source.py index c4013a52..ebff765b 100644 --- a/ldclient/file_data_source.py +++ b/ldclient/file_data_source.py @@ -37,7 +37,7 @@ def factory(cls, **kwargs): to `factory`, 
set `paths` to the file path(s) of your data file(s): :: - factory = FileDataSource.factory(paths: [ myFilePath ]) + factory = FileDataSource.factory(paths = [ myFilePath ]) config = Config(update_processor_class = factory) This will cause the client not to connect to LaunchDarkly to get feature flags. The From 040ced945495c896db7e6eb0a5f259710f2e7113 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 29 Dec 2018 13:27:13 -0800 Subject: [PATCH 016/190] add feature store wrapper class and make Redis feature store use it --- ldclient/feature_store.py | 47 +++++ ldclient/feature_store_helpers.py | 103 +++++++++ ldclient/integrations.py | 31 +++ ldclient/interfaces.py | 145 +++++++++++-- ldclient/redis_feature_store.py | 124 +++++------ testing/test_feature_store.py | 13 +- testing/test_feature_store_helpers.py | 287 ++++++++++++++++++++++++++ 7 files changed, 649 insertions(+), 101 deletions(-) create mode 100644 ldclient/feature_store_helpers.py create mode 100644 ldclient/integrations.py create mode 100644 testing/test_feature_store_helpers.py diff --git a/ldclient/feature_store.py b/ldclient/feature_store.py index 155743ea..e4d2f667 100644 --- a/ldclient/feature_store.py +++ b/ldclient/feature_store.py @@ -4,6 +4,53 @@ from ldclient.rwlock import ReadWriteLock +class CacheConfig: + """Encapsulates caching parameters for feature store implementations that support local caching. + """ + + DEFAULT_EXPIRATION = 15 + DEFAULT_CAPACITY = 1000 + + def __init__(self, + expiration = DEFAULT_EXPIRATION, + capacity = DEFAULT_CAPACITY): + """Constructs an instance of CacheConfig. + :param float expiration: The cache TTL, in seconds. Items will be evicted from the cache after + this amount of time from the time when they were originally cached. If the time is less than or + equal to zero, caching is disabled. + :param int capacity: The maximum number of items that can be in the cache at a time. + """ + self._expiration = expiration + self._capacity = capacity + + @staticmethod + def default(): + """Returns an instance of CacheConfig with default properties. By default, caching is enabled. + This is the same as calling the constructor with no parameters. + :rtype: CacheConfig + """ + return CacheConfig() + + @staticmethod + def disabled(): + """Returns an instance of CacheConfig specifying that caching should be disabled. + :rtype: CacheConfig + """ + return CacheConfig(expiration = 0) + + @property + def enabled(self): + return self._expiration > 0 + + @property + def expiration(self): + return self._expiration + + @property + def capacity(self): + return self._capacity + + class InMemoryFeatureStore(FeatureStore): """ In-memory implementation of a store that holds feature flags and related data received from the streaming API. diff --git a/ldclient/feature_store_helpers.py b/ldclient/feature_store_helpers.py new file mode 100644 index 00000000..d8359274 --- /dev/null +++ b/ldclient/feature_store_helpers.py @@ -0,0 +1,103 @@ +from expiringdict import ExpiringDict + +from ldclient.interfaces import FeatureStore + + +class CachingStoreWrapper(FeatureStore): + """CachingStoreWrapper is a partial implementation of :class:ldclient.interfaces.FeatureStore that + delegates the basic functionality to an implementation of :class:ldclient.interfaces.FeatureStoreCore - + while adding optional caching behavior and other logic that would otherwise be repeated in every + feature store implementation. This makes it easier to create new database integrations by implementing + only the database-specific logic. 
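+    As a minimal sketch (the `MyDatabaseCore` class and its storage behavior are
+    hypothetical, shown only to illustrate the contract that `CachingStoreWrapper`
+    delegates to):
+    ::
+
+        from ldclient.feature_store import CacheConfig
+        from ldclient.feature_store_helpers import CachingStoreWrapper
+
+        class MyDatabaseCore(object):
+            def init_internal(self, all_data):
+                pass  # overwrite the database contents with all_data
+
+            def get_internal(self, kind, key):
+                pass  # return the raw item dict for this key, including deleted placeholders
+
+            def get_all_internal(self, kind):
+                pass  # return a dict of key -> item for all items of this kind
+
+            def upsert_internal(self, kind, item):
+                pass  # store item only if its version is higher; return the stored state
+
+            def initialized_internal(self):
+                pass  # return True if init_internal has ever been called
+
+        store = CachingStoreWrapper(MyDatabaseCore(), CacheConfig(expiration = 30))
+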
+ """ + __INITED_CACHE_KEY__ = "$inited" + + def __init__(self, core, cache_config): + self._core = core + if cache_config.enabled: + self._cache = ExpiringDict(max_len=cache_config.capacity, max_age_seconds=cache_config.expiration) + else: + self._cache = None + self._inited = False + + def init(self, all_data): + self._core.init_internal(all_data) + if self._cache is not None: + self._cache.clear() + for kind, items in all_data.items(): + self._cache[self._all_cache_key(kind)] = self._items_if_not_deleted(items) + for key, item in items.items(): + self._cache[self._item_cache_key(kind, key)] = [item] # note array wrapper + self._inited = True + + def get(self, kind, key, callback=lambda x: x): + if self._cache is not None: + cache_key = self._item_cache_key(kind, key) + cached_item = self._cache.get(cache_key) + # note, cached items are wrapped in an array so we can cache None values + if cached_item is not None: + return callback(self._item_if_not_deleted(cached_item[0])) + item = self._core.get_internal(kind, key) + if self._cache is not None: + self._cache[cache_key] = [item] + return callback(self._item_if_not_deleted(item)) + + def all(self, kind, callback): + if self._cache is not None: + cache_key = self._all_cache_key(kind) + cached_items = self._cache.get(cache_key) + if cached_items is not None: + return callback(cached_items) + items = self._items_if_not_deleted(self._core.get_all_internal(kind)) + if self._cache is not None: + self._cache[cache_key] = items + return callback(items) + + def delete(self, kind, key, version): + deleted_item = { "key": key, "version": version, "deleted": True } + self.upsert(kind, deleted_item) + + def upsert(self, kind, item): + new_state = self._core.upsert_internal(kind, item) + if self._cache is not None: + self._cache[self._item_cache_key(kind, item.get('key'))] = [new_state] + self._cache.pop(self._all_cache_key(kind), None) + + @property + def initialized(self): + if self._inited: + return True + if self._cache is None: + result = self._core.initialized_internal() + else: + result = self._cache.get(CachingStoreWrapper.__INITED_CACHE_KEY__) + if result is None: + result = self._core.initialized_internal() + self._cache[CachingStoreWrapper.__INITED_CACHE_KEY__] = result + if result: + self._inited = True + return result + + @staticmethod + def _item_cache_key(kind, key): + return "{0}:{1}".format(kind.namespace, key) + + @staticmethod + def _all_cache_key(kind): + return kind.namespace + + @staticmethod + def _item_if_not_deleted(item): + if item is not None and item.get('deleted', False): + return None + return item + + @staticmethod + def _items_if_not_deleted(items): + results = {} + if items is not None: + for key, item in items.items(): + if not item.get('deleted', False): + results[key] = item + return results + \ No newline at end of file diff --git a/ldclient/integrations.py b/ldclient/integrations.py new file mode 100644 index 00000000..a82783be --- /dev/null +++ b/ldclient/integrations.py @@ -0,0 +1,31 @@ +from ldclient.feature_store import CacheConfig +from ldclient.feature_store_helpers import CachingStoreWrapper +from ldclient.redis_feature_store import _RedisFeatureStoreCore + + +class Redis(object): + """Provides factory methods for integrations between the LaunchDarkly SDK and Redis, + """ + DEFAULT_URL = 'redis://localhost:6379/0' + DEFAULT_PREFIX = 'launchdarkly' + DEFAULT_MAX_CONNECTIONS = 16 + + @staticmethod + def new_feature_store(url=Redis.DEFAULT_URL, + prefix=Redis.DEFAULT_PREFIX, + 
max_connections=Redis.DEFAULT_MAX_CONNECTIONS, + caching=CacheConfig.default()): + """Creates a Redis-backed implementation of `:class:ldclient.feature_store.FeatureStore`. + + :param string url: The URL of the Redis host; defaults to `DEFAULT_URL` + :param string prefix: A namespace prefix to be prepended to all Redis keys; defaults to + `DEFAULT_PREFIX` + :param int max_connections: The maximum number of Redis connections to keep in the + connection pool; defaults to `DEFAULT_MAX_CONNECTIONS` + :param CacheConfig caching: Specifies whether local caching should be enabled and if so, + sets the cache properties; defaults to `CacheConfig.default()` + """ + core = _RedisFeatureStoreCore(url, prefix, max_connections) + wrapper = CachingStoreWrapper(core, caching) + wrapper.core = core # exposed for testing + return wrapper diff --git a/ldclient/interfaces.py b/ldclient/interfaces.py index 39898408..2710fa25 100644 --- a/ldclient/interfaces.py +++ b/ldclient/interfaces.py @@ -3,64 +3,86 @@ class FeatureStore(object): """ - Stores and retrieves the state of feature flags and related data + A versioned store for feature flags and related objects received from LaunchDarkly. + Implementations should permit concurrent access and updates. + + An "object", for `FeatureStore`, is simply a dict of arbitrary data which must have at least + three properties: "key" (its unique key), "version" (the version number provided by + LaunchDarkly), and "deleted" (True if this is a placeholder for a deleted object). + + Delete and upsert requests are versioned-- if the version number in the request is less than + the currently stored version of the object, the request should be ignored. + + These semantics support the primary use case for the store, which synchronizes a collection + of objects based on update messages that may be received out-of-order. """ __metaclass__ = ABCMeta @abstractmethod def get(self, kind, key, callback): """ - Gets a feature and calls the callback with the feature data to return the result - :param kind: Denotes which collection to access - one of the constants in versioned_data_kind + Retrieves the object to which the specified key is mapped, or None if the key is not found + or the associated object has a "deleted" property of True. The retrieved object, if any (a + dict) can be transformed by the specified callback. + + :param kind: The kind of object to get :type kind: VersionedDataKind - :param key: The key of the object + :param key: The key whose associated object is to be returned :type key: str - :param callback: The function that accepts the retrieved data and returns a transformed value - :type callback: Function that processes the retrieved object once received. - :return: The result of executing callback. + :param callback: A function that accepts the retrieved data and returns a transformed value + :type callback: function + :return: The result of executing callback """ @abstractmethod - def all(self, callback): + def all(self, kind, callback): """ - Returns all feature flags and their data - :param kind: Denotes which collection to access - one of the constants in versioned_data_kind + Retrieves a dictionary of all associated objects of a given kind. The retrieved dict of keys + to objects can be transformed by the specified callback. 
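+
+        For example (a sketch, using the FEATURES kind from versioned_data_kind)::
+
+            all_flags = store.all(FEATURES, lambda x: x)  # returns the stored flags unchanged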
+
+        :param kind: The kind of objects to get
         :type kind: VersionedDataKind
-        :param callback: The function that accepts the retrieved data and returns a transformed value
-        :type callback: Function that processes the retrieved objects once received.
-        :rtype: The result of executing callback.
+        :param callback: A function that accepts the retrieved data and returns a transformed value
+        :type callback: function
+        :rtype: The result of executing callback
         """
 
     @abstractmethod
     def init(self, all_data):
         """
-        Initializes the store with a set of objects. Meant to be called by the UpdateProcessor
+        Initializes (or re-initializes) the store with the specified set of objects. Any existing entries
+        will be removed. Implementations can assume that this set of objects is up to date-- there is no
+        need to perform individual version comparisons between the existing objects and the supplied data.
 
-        :param all_data: The features and their data as provided by LD
+        :param all_data: All objects to be stored
         :type all_data: dict[VersionedDataKind, dict[str, dict]]
         """
 
     @abstractmethod
     def delete(self, kind, key, version):
         """
-        Marks an object as deleted
+        Deletes the object associated with the specified key, if it exists and its version is less than
+        the specified version. The object should be replaced in the data store by a
+        placeholder with the specified version and a "deleted" property of True.
 
-        :param kind: Denotes which collection to access - one of the constants in versioned_data_kind
+        :param kind: The kind of object to delete
         :type kind: VersionedDataKind
-        :param key: The object key
+        :param key: The key of the object to be deleted
         :type key: str
-        :param version: The version of the object to mark as deleted
+        :param version: The version for the delete operation
        :type version: int
         """
 
     @abstractmethod
     def upsert(self, kind, item):
         """
-        Inserts an object if its version is newer or missing
+        Updates or inserts the object associated with the specified key. If an item with the same key
+        already exists, it should update it only if the new item's version property is greater than
+        the old one.
 
-        :param kind: Denotes which collection to access - one of the constants in versioned_data_kind
+        :param kind: The kind of object to update
         :type kind: VersionedDataKind
-        :param item: The object to be inserted or updated - must have key and version properties
+        :param item: The object to update or insert
         :type feature: dict
 
@@ -73,6 +95,85 @@ def initialized(self):
         """
 
 
+class FeatureStoreCore(object):
+    """
+    `FeatureStoreCore` is an interface for a simplified subset of the functionality of :class:`FeatureStore`,
+    to be used in conjunction with :class:`feature_store_helpers.CachingStoreWrapper`. This allows developers
+    of custom `FeatureStore` implementations to avoid repeating logic that would
+    commonly be needed in any such implementation, such as caching. Instead, they can implement
+    only `FeatureStoreCore` and then create a `CachingStoreWrapper`.
+    """
+    __metaclass__ = ABCMeta
+
+    @abstractmethod
+    def get_internal(self, kind, key):
+        """
+        Returns the object to which the specified key is mapped, or None if no such item exists.
+        The method should not attempt to filter out any items based on their deleted property,
+        nor to cache any items.
+
+        :param kind: The kind of object to get
+        :type kind: VersionedDataKind
+        :param key: The key of the object
+        :type key: str
+        :return: The object to which the specified key is mapped, or None
+        :rtype: dict
+        """
+
+    @abstractmethod
+    def get_all_internal(self, kind):
+        """
+        Returns a dictionary of all associated objects of a given kind. The method should not attempt
+        to filter out any items based on their deleted property, nor to cache any items.
+
+        :param kind: The kind of objects to get
+        :type kind: VersionedDataKind
+        :return: A dictionary of keys to items
+        :rtype: dict[str, dict]
+        """
+
+    @abstractmethod
+    def init_internal(self, all_data):
+        """
+        Initializes (or re-initializes) the store with the specified set of objects. Any existing entries
+        will be removed. Implementations can assume that this set of objects is up to date-- there is no
+        need to perform individual version comparisons between the existing objects and the supplied
+        data.
+
+        :param all_data: A dictionary of data kinds to item collections
+        :type all_data: dict[VersionedDataKind, dict[str, dict]]
+        """
+
+    @abstractmethod
+    def upsert_internal(self, kind, item):
+        """
+        Updates or inserts the object associated with the specified key. If an item with the same key
+        already exists, it should update it only if the new item's version property is greater than
+        the old one. It should return the final state of the item, i.e. if the update succeeded then
+        it returns the item that was passed in, and if the update failed due to the version check
+        then it returns the item that is currently in the data store (this ensures that
+        `CachingStoreWrapper` will update the cache correctly).
+
+        :param kind: The kind of object to update
+        :type kind: VersionedDataKind
+        :param item: The object to update or insert
+        :type item: dict
+        :return: The state of the object after the update
+        :rtype: dict
+        """
+
+    @abstractmethod
+    def initialized_internal(self):
+        """
+        Returns True if this store has been initialized. In a shared data store, it should be able to
+        detect this even if init_internal was called in a different process, i.e. the test should be
+        based on looking at what is in the data store. The method does not need to worry about caching
+        this value; `CachingStoreWrapper` will only call it when necessary.
+
+        :rtype: bool
+        """
+
+
 class BackgroundOperation(object):
     """
     Performs a task in the background
diff --git a/ldclient/redis_feature_store.py b/ldclient/redis_feature_store.py
index 71b7261b..b9bdf731 100644
--- a/ldclient/redis_feature_store.py
+++ b/ldclient/redis_feature_store.py
@@ -1,21 +1,20 @@
 import json
-from pprint import pprint
-from expiringdict import ExpiringDict
 import redis
 
 from ldclient import log
-from ldclient.interfaces import FeatureStore
-from ldclient.memoized_value import MemoizedValue
+from ldclient.feature_store import CacheConfig
+from ldclient.feature_store_helpers import CachingStoreWrapper
+from ldclient.interfaces import FeatureStore, FeatureStoreCore
 from ldclient.versioned_data_kind import FEATURES
 
 
-class ForgetfulDict(dict):
-    def __setitem__(self, key, value):
-        pass
-
-
 class RedisFeatureStore(FeatureStore):
+    """A Redis-backed implementation of :class:`ldclient.feature_store.FeatureStore`.
+
+    This implementation class is deprecated and may be changed or removed in the future. Please use
+    :func:`ldclient.integrations.Redis.new_feature_store()`.
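+
+    For example, the equivalent store built through the new factory method (a sketch; the
+    defaults for URL, prefix, and caching match this constructor's defaults)::
+
+        from ldclient.integrations import Redis
+
+        store = Redis.new_feature_store()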
+ """ def __init__(self, url='redis://localhost:6379/0', prefix='launchdarkly', @@ -23,23 +22,42 @@ def __init__(self, expiration=15, capacity=1000): + self.core = _RedisFeatureStoreCore(url, prefix, max_connections) # exposed for testing + self._wrapper = CachingStoreWrapper(self.core, CacheConfig(expiration=expiration, capacity=capacity)) + + def get(self, kind, key, callback = lambda x: x): + return self._wrapper.get(kind, key, callback) + + def all(self, kind, callback): + return self._wrapper.all(kind, callback) + + def init(self, all_data): + return self._wrapper.init(all_data) + + def upsert(self, kind, item): + return self._wrapper.upsert(kind, item) + + def delete(self, kind, key, version): + return self._wrapper.delete(kind, key, version) + + @property + def initialized(self): + return self._wrapper.initialized + + +class _RedisFeatureStoreCore(FeatureStoreCore): + def __init__(self, url, prefix, max_connections): self._prefix = prefix - self._cache = ForgetfulDict() if expiration == 0 else ExpiringDict(max_len=capacity, - max_age_seconds=expiration) self._pool = redis.ConnectionPool.from_url(url=url, max_connections=max_connections) - self._inited = MemoizedValue(lambda: self._query_init()) + self.test_update_hook = None # exposed for testing log.info("Started RedisFeatureStore connected to URL: " + url + " using prefix: " + prefix) def _items_key(self, kind): return "{0}:{1}".format(self._prefix, kind.namespace) - def _cache_key(self, kind, key): - return "{0}:{1}".format(kind.namespace, key) - - def init(self, all_data): + def init_internal(self, all_data): pipe = redis.Redis(connection_pool=self._pool).pipeline() - self._cache.clear() all_count = 0 for kind, items in all_data.items(): @@ -48,53 +66,30 @@ def init(self, all_data): for key, item in items.items(): item_json = json.dumps(item) pipe.hset(base_key, key, item_json) - self._cache[self._cache_key(kind, key)] = item all_count = all_count + len(items) - try: - pipe.execute() - except: - self._cache.clear() - raise + pipe.execute() log.info("Initialized RedisFeatureStore with %d items", all_count) - self._inited.set(True) - def all(self, kind, callback): + def get_all_internal(self, kind): r = redis.Redis(connection_pool=self._pool) try: all_items = r.hgetall(self._items_key(kind)) except BaseException as e: log.error("RedisFeatureStore: Could not retrieve '%s' from Redis with error: %s. Returning None.", kind.namespace, e) - return callback(None) + return None if all_items is None or all_items is "": log.warn("RedisFeatureStore: call to get all '%s' returned no results. Returning None.", kind.namespace) - return callback(None) + return None results = {} for key, item_json in all_items.items(): key = key.decode('utf-8') # necessary in Python 3 - item = json.loads(item_json.decode('utf-8')) - if item.get('deleted', False) is False: - results[key] = item - return callback(results) - - def get(self, kind, key, callback=lambda x: x): - item = self._get_even_if_deleted(kind, key, check_cache=True) - if item is not None and item.get('deleted', False) is True: - log.debug("RedisFeatureStore: get returned deleted item %s in '%s'. 
Returning None.", key, kind.namespace) - return callback(None) - return callback(item) - - def _get_even_if_deleted(self, kind, key, check_cache = True): - cacheKey = self._cache_key(kind, key) - if check_cache: - item = self._cache.get(cacheKey) - if item is not None: - # reset ttl - self._cache[cacheKey] = item - return item + results[key] = json.loads(item_json.decode('utf-8')) + return results + def get_internal(self, kind, key): try: r = redis.Redis(connection_pool=self._pool) item_json = r.hget(self._items_key(kind), key) @@ -107,26 +102,9 @@ def _get_even_if_deleted(self, kind, key, check_cache = True): log.debug("RedisFeatureStore: key %s not found in '%s'. Returning None.", key, kind.namespace) return None - item = json.loads(item_json.decode('utf-8')) - self._cache[cacheKey] = item - return item - - def delete(self, kind, key, version): - deleted_item = { "key": key, "version": version, "deleted": True } - self._update_with_versioning(kind, deleted_item) - - def upsert(self, kind, item): - self._update_with_versioning(kind, item) - - @property - def initialized(self): - return self._inited.get() - - def _query_init(self): - r = redis.Redis(connection_pool=self._pool) - return r.exists(self._items_key(FEATURES)) + return json.loads(item_json.decode('utf-8')) - def _update_with_versioning(self, kind, item): + def upsert_internal(self, kind, item): r = redis.Redis(connection_pool=self._pool) base_key = self._items_key(kind) key = item['key'] @@ -135,14 +113,15 @@ def _update_with_versioning(self, kind, item): while True: pipeline = r.pipeline() pipeline.watch(base_key) - old = self._get_even_if_deleted(kind, key, check_cache=False) - self._before_update_transaction(base_key, key) + old = self.get_internal(kind, key) + if self.test_update_hook is not None: + self.test_update_hook(base_key, key) if old and old['version'] >= item['version']: log.debug('RedisFeatureStore: Attempted to %s key: %s version %d with a version that is the same or older: %d in "%s"', 'delete' if item.get('deleted') else 'update', key, old['version'], item['version'], kind.namespace) pipeline.unwatch() - break + return old else: pipeline.multi() pipeline.hset(base_key, key, item_json) @@ -153,8 +132,11 @@ def _update_with_versioning(self, kind, item): except redis.exceptions.WatchError: log.debug("RedisFeatureStore: concurrent modification detected, retrying") continue - self._cache[self._cache_key(kind, key)] = item - break + return item + + def initialized_internal(self): + r = redis.Redis(connection_pool=self._pool) + return r.exists(self._items_key(FEATURES)) def _before_update_transaction(self, base_key, key): # exposed for testing diff --git a/testing/test_feature_store.py b/testing/test_feature_store.py index 245341ec..b8696529 100644 --- a/testing/test_feature_store.py +++ b/testing/test_feature_store.py @@ -1,5 +1,4 @@ import json -from mock import patch import pytest import redis @@ -133,8 +132,7 @@ def test_upsert_older_version_after_delete(self, store): class TestRedisFeatureStoreExtraTests: - @patch.object(RedisFeatureStore, '_before_update_transaction') - def test_upsert_race_condition_against_external_client_with_higher_version(self, mock_method): + def test_upsert_race_condition_against_external_client_with_higher_version(self): other_client = redis.StrictRedis(host='localhost', port=6379, db=0) store = RedisFeatureStore() store.init({ FEATURES: {} }) @@ -144,7 +142,7 @@ def hook(base_key, key): if other_version['version'] <= 4: other_client.hset(base_key, key, json.dumps(other_version)) 
other_version['version'] = other_version['version'] + 1 - mock_method.side_effect = hook + store.core.test_update_hook = hook feature = { u'key': 'flagkey', u'version': 1 } @@ -152,8 +150,7 @@ def hook(base_key, key): result = store.get(FEATURES, 'flagkey', lambda x: x) assert result['version'] == 2 - @patch.object(RedisFeatureStore, '_before_update_transaction') - def test_upsert_race_condition_against_external_client_with_lower_version(self, mock_method): + def test_upsert_race_condition_against_external_client_with_lower_version(self): other_client = redis.StrictRedis(host='localhost', port=6379, db=0) store = RedisFeatureStore() store.init({ FEATURES: {} }) @@ -163,7 +160,7 @@ def hook(base_key, key): if other_version['version'] <= 4: other_client.hset(base_key, key, json.dumps(other_version)) other_version['version'] = other_version['version'] + 1 - mock_method.side_effect = hook + store.core.test_update_hook = hook feature = { u'key': 'flagkey', u'version': 5 } @@ -186,7 +183,7 @@ def test_exception_is_handled_in_all(self, caplog): # This just verifies the fix for a bug that caused an error during exception handling in Python 3 store = RedisFeatureStore(url='redis://bad') all = store.all(FEATURES, lambda x: x) - assert all is None + assert all == {} loglines = get_log_lines(caplog) assert len(loglines) == 2 message = loglines[1].message diff --git a/testing/test_feature_store_helpers.py b/testing/test_feature_store_helpers.py new file mode 100644 index 00000000..01bb245a --- /dev/null +++ b/testing/test_feature_store_helpers.py @@ -0,0 +1,287 @@ +import pytest +from time import sleep + +from ldclient.feature_store import CacheConfig +from ldclient.feature_store_helpers import CachingStoreWrapper +from ldclient.versioned_data_kind import VersionedDataKind + +THINGS = VersionedDataKind(namespace = "things", request_api_path = "", stream_api_path = "") +WRONG_THINGS = VersionedDataKind(namespace = "wrong", request_api_path = "", stream_api_path = "") + +def make_wrapper(core, cached): + return CachingStoreWrapper(core, CacheConfig(expiration=30) if cached else CacheConfig.disabled()) + +class MockCore: + def __init__(self): + self.data = {} + self.inited = False + self.inited_query_count = 0 + + def init_internal(self, all_data): + self.data = {} + for kind, items in all_data.items(): + self.data[kind] = items.copy() + + def get_internal(self, kind, key): + items = self.data.get(kind) + return None if items is None else items.get(key) + + def get_all_internal(self, kind): + return self.data.get(kind) + + def upsert_internal(self, kind, item): + key = item.get('key') + items = self.data.get(kind) + if items is None: + items = {} + self.data[kind] = items + old_item = items.get(key) + if old_item is None or old_item.get('version') < item.get('version'): + items[key] = item + return item + return old_item + + def initialized_internal(self): + self.inited_query_count = self.inited_query_count + 1 + return self.inited + + def force_set(self, kind, item): + items = self.data.get(kind) + if items is None: + items = {} + self.data[kind] = items + items[item.get('key')] = item + + def force_remove(self, kind, key): + items = self.data.get(kind) + if items is not None: + items.pop(key, None) + +class TestCachingStoreWrapper: + @pytest.mark.parametrize("cached", [False, True]) + def test_get_item(self, cached): + core = MockCore() + wrapper = make_wrapper(core, cached) + key = "flag" + itemv1 = { "key": key, "version": 1 } + itemv2 = { "key": key, "version": 2 } + + core.force_set(THINGS, itemv1) 
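+        # the first read is served by the core; with caching enabled it also primes the cache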
+        assert wrapper.get(THINGS, key) == itemv1
+
+        core.force_set(THINGS, itemv2)
+        assert wrapper.get(THINGS, key) == (itemv1 if cached else itemv2)  # if cached, we will not see the new underlying value yet
+
+    @pytest.mark.parametrize("cached", [False, True])
+    def test_get_deleted_item(self, cached):
+        core = MockCore()
+        wrapper = make_wrapper(core, cached)
+        key = "flag"
+        itemv1 = { "key": key, "version": 1, "deleted": True }
+        itemv2 = { "key": key, "version": 2 }
+
+        core.force_set(THINGS, itemv1)
+        assert wrapper.get(THINGS, key) is None  # item is filtered out because deleted is true
+
+        core.force_set(THINGS, itemv2)
+        assert wrapper.get(THINGS, key) == (None if cached else itemv2)  # if cached, we will not see the new underlying value yet
+
+    @pytest.mark.parametrize("cached", [False, True])
+    def test_get_missing_item(self, cached):
+        core = MockCore()
+        wrapper = make_wrapper(core, cached)
+        key = "flag"
+        item = { "key": key, "version": 1 }
+
+        assert wrapper.get(THINGS, key) is None
+
+        core.force_set(THINGS, item)
+        assert wrapper.get(THINGS, key) == (None if cached else item)  # the cache can retain a None result
+
+    @pytest.mark.parametrize("cached", [False, True])
+    def test_get_with_lambda(self, cached):
+        core = MockCore()
+        wrapper = make_wrapper(core, cached)
+        key = "flag"
+        item = { "key": key, "version": 1 }
+        modified_item = { "key": key, "version": 99 }
+
+        core.force_set(THINGS, item)
+        assert wrapper.get(THINGS, key, lambda x: modified_item) == modified_item
+
+    def test_cached_get_uses_values_from_init(self):
+        core = MockCore()
+        wrapper = make_wrapper(core, True)
+        item1 = { "key": "flag1", "version": 1 }
+        item2 = { "key": "flag2", "version": 1 }
+
+        wrapper.init({ THINGS: { item1["key"]: item1, item2["key"]: item2 } })
+        core.force_remove(THINGS, item1["key"])
+        assert wrapper.get(THINGS, item1["key"]) == item1
+
+    @pytest.mark.parametrize("cached", [False, True])
+    def test_get_all(self, cached):
+        core = MockCore()
+        wrapper = make_wrapper(core, cached)
+        item1 = { "key": "flag1", "version": 1 }
+        item2 = { "key": "flag2", "version": 1 }
+
+        core.force_set(THINGS, item1)
+        core.force_set(THINGS, item2)
+        assert wrapper.all(THINGS, lambda x: x) == { item1["key"]: item1, item2["key"]: item2 }
+
+        core.force_remove(THINGS, item2["key"])
+        if cached:
+            assert wrapper.all(THINGS, lambda x: x) == { item1["key"]: item1, item2["key"]: item2 }
+        else:
+            assert wrapper.all(THINGS, lambda x: x) == { item1["key"]: item1 }
+
+    @pytest.mark.parametrize("cached", [False, True])
+    def test_get_all_removes_deleted_items(self, cached):
+        core = MockCore()
+        wrapper = make_wrapper(core, cached)
+        item1 = { "key": "flag1", "version": 1 }
+        item2 = { "key": "flag2", "version": 1, "deleted": True }
+
+        core.force_set(THINGS, item1)
+        core.force_set(THINGS, item2)
+        assert wrapper.all(THINGS, lambda x: x) == { item1["key"]: item1 }
+
+    @pytest.mark.parametrize("cached", [False, True])
+    def test_get_all_changes_None_to_empty_dict(self, cached):
+        core = MockCore()
+        wrapper = make_wrapper(core, cached)
+
+        assert wrapper.all(WRONG_THINGS, lambda x: x) == {}
+
+    @pytest.mark.parametrize("cached", [False, True])
+    def test_get_all_with_lambda(self, cached):
+        core = MockCore()
+        wrapper = make_wrapper(core, cached)
+        extra = { "extra": True }
+        item1 = { "key": "flag1", "version": 1 }
+        item2 = { "key": "flag2", "version": 1 }
+        core.force_set(THINGS, item1)
+        core.force_set(THINGS, item2)
+        assert wrapper.all(THINGS, lambda x: dict(x, **extra)) == {
+            item1["key"]: item1,
item2["key"]: item2, "extra": True + } + + def test_cached_get_all_uses_values_from_init(self): + core = MockCore() + wrapper = make_wrapper(core, True) + item1 = { "key": "flag1", "version": 1 } + item2 = { "key": "flag2", "version": 1 } + both = { item1["key"]: item1, item2["key"]: item2 } + + wrapper.init({ THINGS: both }) + core.force_remove(THINGS, item1["key"]) + assert wrapper.all(THINGS, lambda x: x) == both + + @pytest.mark.parametrize("cached", [False, True]) + def test_upsert_successful(self, cached): + core = MockCore() + wrapper = make_wrapper(core, cached) + key = "flag" + itemv1 = { "key": key, "version": 1 } + itemv2 = { "key": key, "version": 2 } + + wrapper.upsert(THINGS, itemv1) + assert core.data[THINGS][key] == itemv1 + + wrapper.upsert(THINGS, itemv2) + assert core.data[THINGS][key] == itemv2 + + # if we have a cache, verify that the new item is now cached by writing a different value + # to the underlying data - Get should still return the cached item + if cached: + itemv3 = { "key": key, "version": 3 } + core.force_set(THINGS, itemv3) + + assert wrapper.get(THINGS, key) == itemv2 + + def test_cached_upsert_unsuccessful(self): + # This is for an upsert where the data in the store has a higher version. In an uncached + # store, this is just a no-op as far as the wrapper is concerned so there's nothing to + # test here. In a cached store, we need to verify that the cache has been refreshed + # using the data that was found in the store. + core = MockCore() + wrapper = make_wrapper(core, True) + key = "flag" + itemv1 = { "key": key, "version": 1 } + itemv2 = { "key": key, "version": 2 } + + wrapper.upsert(THINGS, itemv2) + assert core.data[THINGS][key] == itemv2 + + wrapper.upsert(THINGS, itemv1) + assert core.data[THINGS][key] == itemv2 # value in store remains the same + + itemv3 = { "key": key, "version": 3 } + core.force_set(THINGS, itemv3) # bypasses cache so we can verify that itemv2 is in the cache + assert wrapper.get(THINGS, key) == itemv2 + + @pytest.mark.parametrize("cached", [False, True]) + def test_delete(self, cached): + core = MockCore() + wrapper = make_wrapper(core, cached) + key = "flag" + itemv1 = { "key": key, "version": 1 } + itemv2 = { "key": key, "version": 2, "deleted": True } + itemv3 = { "key": key, "version": 3 } + + core.force_set(THINGS, itemv1) + assert wrapper.get(THINGS, key) == itemv1 + + wrapper.delete(THINGS, key, 2) + assert core.data[THINGS][key] == itemv2 + + core.force_set(THINGS, itemv3) # make a change that bypasses the cache + assert wrapper.get(THINGS, key) == (None if cached else itemv3) + + def test_uncached_initialized_queries_state_only_until_inited(self): + core = MockCore() + wrapper = make_wrapper(core, False) + + assert wrapper.initialized is False + assert core.inited_query_count == 1 + + core.inited = True + assert wrapper.initialized is True + assert core.inited_query_count == 2 + + core.inited = False + assert wrapper.initialized is True + assert core.inited_query_count == 2 + + def test_uncached_initialized_does_not_query_state_if_init_was_called(self): + core = MockCore() + wrapper = make_wrapper(core, False) + + assert wrapper.initialized is False + assert core.inited_query_count == 1 + + wrapper.init({}) + + assert wrapper.initialized is True + assert core.inited_query_count == 1 + + def test_cached_initialized_can_cache_false_result(self): + core = MockCore() + wrapper = CachingStoreWrapper(core, CacheConfig(expiration=0.2)) # use a shorter cache TTL for this test + + assert wrapper.initialized is False + 
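+        # the False result is now cached; until the short TTL expires, further checks
+        # should not reach the core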
assert core.inited_query_count == 1 + + core.inited = True + assert wrapper.initialized is False + assert core.inited_query_count == 1 + + sleep(0.5) + + assert wrapper.initialized is True + assert core.inited_query_count == 2 + + # From this point on it should remain true and the method should not be called + assert wrapper.initialized is True + assert core.inited_query_count == 2 From 59a67a844b1650eb7a7600a1d44ca120a8f03a72 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 29 Dec 2018 13:39:42 -0800 Subject: [PATCH 017/190] test the new Redis factory method --- ldclient/integrations.py | 6 +++--- testing/test_feature_store.py | 23 +++++++++++++++++------ 2 files changed, 20 insertions(+), 9 deletions(-) diff --git a/ldclient/integrations.py b/ldclient/integrations.py index a82783be..86b5248d 100644 --- a/ldclient/integrations.py +++ b/ldclient/integrations.py @@ -11,9 +11,9 @@ class Redis(object): DEFAULT_MAX_CONNECTIONS = 16 @staticmethod - def new_feature_store(url=Redis.DEFAULT_URL, - prefix=Redis.DEFAULT_PREFIX, - max_connections=Redis.DEFAULT_MAX_CONNECTIONS, + def new_feature_store(url='redis://localhost:6379/0', + prefix='launchdarkly', + max_connections=16, caching=CacheConfig.default()): """Creates a Redis-backed implementation of `:class:ldclient.feature_store.FeatureStore`. diff --git a/testing/test_feature_store.py b/testing/test_feature_store.py index b8696529..5716fa0e 100644 --- a/testing/test_feature_store.py +++ b/testing/test_feature_store.py @@ -2,7 +2,8 @@ import pytest import redis -from ldclient.feature_store import InMemoryFeatureStore +from ldclient.feature_store import CacheConfig, InMemoryFeatureStore +from ldclient.integrations import Redis from ldclient.redis_feature_store import RedisFeatureStore from ldclient.versioned_data_kind import FEATURES @@ -19,17 +20,27 @@ class TestFeatureStore: redis_host = 'localhost' redis_port = 6379 + def clear_redis_data(self): + r = redis.StrictRedis(host=self.redis_host, port=self.redis_port, db=0) + r.delete("launchdarkly:features") + def in_memory(self): return InMemoryFeatureStore() def redis_with_local_cache(self): - r = redis.StrictRedis(host=self.redis_host, port=self.redis_port, db=0) - r.delete("launchdarkly:features") - return RedisFeatureStore() + self.clear_redis_data() + return Redis.new_feature_store() def redis_no_local_cache(self): - r = redis.StrictRedis(host=self.redis_host, port=self.redis_port, db=0) - r.delete("launchdarkly:features") + self.clear_redis_data() + return Redis.new_feature_store(caching=CacheConfig.disabled()) + + def deprecated_redis_with_local_cache(self): + self.clear_redis_data() + return RedisFeatureStore() + + def deprecated_redis_no_local_cache(self): + self.clear_redis_data() return RedisFeatureStore(expiration=0) params = [in_memory, redis_with_local_cache, redis_no_local_cache] From 1e38ac10afceb7a4b34ada8351e4c9552070f563 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 29 Dec 2018 15:22:39 -0800 Subject: [PATCH 018/190] add DynamoDB support --- .circleci/config.yml | 6 + dynamodb-requirements.txt | 1 + ldclient/dynamodb_feature_store.py | 191 +++++++++++++++++++++++++++++ ldclient/integrations.py | 25 +++- ldclient/redis_feature_store.py | 11 +- test-requirements.txt | 1 + testing/test_feature_store.py | 134 ++++++++++++++++---- 7 files changed, 345 insertions(+), 24 deletions(-) create mode 100644 dynamodb-requirements.txt create mode 100644 ldclient/dynamodb_feature_store.py diff --git a/.circleci/config.yml b/.circleci/config.yml index 05cb973c..92699a3c 100644 --- 
a/.circleci/config.yml +++ b/.circleci/config.yml @@ -40,28 +40,34 @@ jobs: docker: - image: circleci/python:2.7-jessie - image: redis + - image: amazon/dynamodb-local test-3.3: <<: *test-template docker: - image: circleci/python:3.3-jessie - image: redis + - image: amazon/dynamodb-local test-3.4: <<: *test-template docker: - image: circleci/python:3.4-jessie - image: redis + - image: amazon/dynamodb-local test-3.5: <<: *test-template docker: - image: circleci/python:3.5-jessie - image: redis + - image: amazon/dynamodb-local test-3.6: <<: *test-template docker: - image: circleci/python:3.6-jessie - image: redis + - image: amazon/dynamodb-local test-3.7: <<: *test-template docker: - image: circleci/python:3.7-stretch - image: redis + - image: amazon/dynamodb-local diff --git a/dynamodb-requirements.txt b/dynamodb-requirements.txt new file mode 100644 index 00000000..b72b66b6 --- /dev/null +++ b/dynamodb-requirements.txt @@ -0,0 +1 @@ +boto3>=1.9.71 diff --git a/ldclient/dynamodb_feature_store.py b/ldclient/dynamodb_feature_store.py new file mode 100644 index 00000000..f3879d71 --- /dev/null +++ b/ldclient/dynamodb_feature_store.py @@ -0,0 +1,191 @@ +import json + +have_dynamodb = False +try: + import boto3 + have_dynamodb = True +except ImportError: + pass + +from ldclient import log +from ldclient.feature_store import CacheConfig +from ldclient.feature_store_helpers import CachingStoreWrapper +from ldclient.interfaces import FeatureStore, FeatureStoreCore + +# +# Internal implementation of the DynamoDB feature store. +# +# Implementation notes: +# +# * Feature flags, segments, and any other kind of entity the LaunchDarkly client may wish +# to store, are all put in the same table. The only two required attributes are "key" (which +# is present in all storeable entities) and "namespace" (a parameter from the client that is +# used to disambiguate between flags and segments). +# +# * Because of DynamoDB's restrictions on attribute values (e.g. empty strings are not +# allowed), the standard DynamoDB marshaling mechanism with one attribute per object property +# is not used. Instead, the entire object is serialized to JSON and stored in a single +# attribute, "item". The "version" property is also stored as a separate attribute since it +# is used for updates. +# +# * Since DynamoDB doesn't have transactions, the init() method - which replaces the entire data +# store - is not atomic, so there can be a race condition if another process is adding new data +# via upsert(). To minimize this, we don't delete all the data at the start; instead, we update +# the items we've received, and then delete all other items. That could potentially result in +# deleting new data from another process, but that would be the case anyway if the init() +# happened to execute later than the upsert(); we are relying on the fact that normally the +# process that did the init() will also receive the new data shortly and do its own upsert(). +# +# * DynamoDB has a maximum item size of 400KB. Since each feature flag or user segment is +# stored as a single item, this mechanism will not work for extremely large flags or segments. 
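+#
+# * To illustrate the storage format (with hypothetical values and no prefix configured): a
+#   flag { "key": "my-flag", "version": 2 } in the "features" namespace is stored roughly as
+#   { "namespace": { "S": "features" }, "key": { "S": "my-flag" }, "version": { "N": "2" },
+#     "item": { "S": "{\"key\": \"my-flag\", \"version\": 2}" } }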
+# + +class _DynamoDBFeatureStoreCore(FeatureStoreCore): + PARTITION_KEY = 'namespace' + SORT_KEY = 'key' + VERSION_ATTRIBUTE = 'version' + ITEM_JSON_ATTRIBUTE = 'item' + + def __init__(self, table_name, prefix, dynamodb_opts): + if not have_dynamodb: + raise NotImplementedError("Cannot use DynamoDB feature store because AWS SDK (boto3 package) is not installed") + self._table_name = table_name + self._prefix = None if prefix == "" else prefix + self._client = boto3.client('dynamodb', **dynamodb_opts) + + def init_internal(self, all_data): + # Start by reading the existing keys; we will later delete any of these that weren't in all_data. + unused_old_keys = self._read_existing_keys(all_data.keys()) + requests = [] + num_items = 0 + inited_key = self._inited_key() + + # Insert or update every provided item + for kind, items in all_data.items(): + for key, item in items.items(): + encoded_item = self._marshal_item(kind, item) + requests.append({ 'PutRequest': { 'Item': encoded_item } }) + combined_key = (self._namespace_for_kind(kind), key) + unused_old_keys.discard(combined_key) + num_items = num_items + 1 + + # Now delete any previously existing items whose keys were not in the current data + for combined_key in unused_old_keys: + if combined_key[0] != inited_key: + requests.append({ 'DeleteRequest': { 'Key': self._make_keys(combined_key[0], combined_key[1]) } }) + + # Now set the special key that we check in initialized_internal() + requests.append({ 'PutRequest': { 'Item': self._make_keys(inited_key, inited_key) } }) + + _DynamoDBHelpers.batch_write_requests(self._client, self._table_name, requests) + log.info('Initialized table %s with %d items', self._table_name, num_items) + + def get_internal(self, kind, key): + resp = self._get_item_by_keys(self._namespace_for_kind(kind), key) + return self._unmarshal_item(resp.get('Item')) + + def get_all_internal(self, kind): + items_out = {} + paginator = self._client.get_paginator('query') + for resp in paginator.paginate(**self._make_query_for_kind(kind)): + for item in resp['Items']: + item_out = self._unmarshal_item(item) + items_out[item_out['key']] = item_out + return items_out + + def upsert_internal(self, kind, item): + encoded_item = self._marshal_item(kind, item) + try: + req = { + 'TableName': self._table_name, + 'Item': encoded_item, + 'ConditionExpression': 'attribute_not_exists(#namespace) or attribute_not_exists(#key) or :version > #version', + 'ExpressionAttributeNames': { + '#namespace': self.PARTITION_KEY, + '#key': self.SORT_KEY, + '#version': self.VERSION_ATTRIBUTE + }, + 'ExpressionAttributeValues': { + ':version': { 'N': str(item['version']) } + } + } + self._client.put_item(**req) + except self._client.exceptions.ConditionalCheckFailedException: + # The item was not updated because there's a newer item in the database. We must now + # read the item that's in the database and return it, so CachingStoreWrapper can cache it. 
+            return self.get_internal(kind, item['key'])
+        return item
+
+    def initialized_internal(self):
+        resp = self._get_item_by_keys(self._inited_key(), self._inited_key())
+        return resp.get('Item') is not None and len(resp['Item']) > 0
+
+    def _prefixed_namespace(self, base):
+        return base if self._prefix is None else (self._prefix + ':' + base)
+
+    def _namespace_for_kind(self, kind):
+        return self._prefixed_namespace(kind.namespace)
+
+    def _inited_key(self):
+        return self._prefixed_namespace('$inited')
+
+    def _make_keys(self, namespace, key):
+        return {
+            self.PARTITION_KEY: { 'S': namespace },
+            self.SORT_KEY: { 'S': key }
+        }
+
+    def _make_query_for_kind(self, kind):
+        return {
+            'TableName': self._table_name,
+            'ConsistentRead': True,
+            'KeyConditions': {
+                self.PARTITION_KEY: {
+                    'AttributeValueList': [
+                        { 'S': self._namespace_for_kind(kind) }
+                    ],
+                    'ComparisonOperator': 'EQ'
+                }
+            }
+        }
+
+    def _get_item_by_keys(self, namespace, key):
+        return self._client.get_item(TableName=self._table_name, Key=self._make_keys(namespace, key))
+
+    def _read_existing_keys(self, kinds):
+        keys = set()
+        for kind in kinds:
+            req = self._make_query_for_kind(kind)
+            req['ProjectionExpression'] = '#namespace, #key'
+            req['ExpressionAttributeNames'] = {
+                '#namespace': self.PARTITION_KEY,
+                '#key': self.SORT_KEY
+            }
+            paginator = self._client.get_paginator('query')
+            for resp in paginator.paginate(**req):
+                for item in resp['Items']:
+                    namespace = item[self.PARTITION_KEY]['S']
+                    key = item[self.SORT_KEY]['S']
+                    keys.add((namespace, key))
+        return keys
+
+    def _marshal_item(self, kind, item):
+        json_str = json.dumps(item)
+        ret = self._make_keys(self._namespace_for_kind(kind), item['key'])
+        ret[self.VERSION_ATTRIBUTE] = { 'N': str(item['version']) }
+        ret[self.ITEM_JSON_ATTRIBUTE] = { 'S': json_str }
+        return ret
+
+    def _unmarshal_item(self, item):
+        if item is None:
+            return None
+        json_attr = item.get(self.ITEM_JSON_ATTRIBUTE)
+        return None if json_attr is None else json.loads(json_attr['S'])
+
+
+class _DynamoDBHelpers(object):
+    @staticmethod
+    def batch_write_requests(client, table_name, requests):
+        batch_size = 25  # DynamoDB's batch_write_item accepts at most 25 requests per call
+        for batch in (requests[i:i+batch_size] for i in range(0, len(requests), batch_size)):
+            client.batch_write_item(RequestItems={ table_name: batch })
diff --git a/ldclient/integrations.py b/ldclient/integrations.py
index 86b5248d..80063389 100644
--- a/ldclient/integrations.py
+++ b/ldclient/integrations.py
@@ -1,10 +1,33 @@
 from ldclient.feature_store import CacheConfig
 from ldclient.feature_store_helpers import CachingStoreWrapper
+from ldclient.dynamodb_feature_store import _DynamoDBFeatureStoreCore
 from ldclient.redis_feature_store import _RedisFeatureStoreCore
 
 
+class DynamoDB(object):
+    """Provides factory methods for integrations between the LaunchDarkly SDK and DynamoDB.
+    """
+
+    @staticmethod
+    def new_feature_store(table_name,
+                          prefix=None,
+                          dynamodb_opts={},
+                          caching=CacheConfig.default()):
+        """Creates a DynamoDB-backed implementation of `:class:ldclient.feature_store.FeatureStore`.
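+
+        For example (a sketch; the table name and region are placeholders, and the table must
+        already exist)::
+
+            store = DynamoDB.new_feature_store('my-ld-table',
+                dynamodb_opts={ 'region_name': 'us-east-1' },
+                caching=CacheConfig.default())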
+ + :param string table_name: The name of an existing DynamoDB table + :param string prefix: An optional namespace prefix to be prepended to all Redis keys + :param dict dynamodb_opts: Optional parameters for configuring the DynamoDB client, as defined in + the boto3 API + :param CacheConfig caching: Specifies whether local caching should be enabled and if so, + sets the cache properties; defaults to `CacheConfig.default()` + """ + core = _DynamoDBFeatureStoreCore(table_name, prefix, dynamodb_opts) + return CachingStoreWrapper(core, caching) + + class Redis(object): - """Provides factory methods for integrations between the LaunchDarkly SDK and Redis, + """Provides factory methods for integrations between the LaunchDarkly SDK and Redis. """ DEFAULT_URL = 'redis://localhost:6379/0' DEFAULT_PREFIX = 'launchdarkly' diff --git a/ldclient/redis_feature_store.py b/ldclient/redis_feature_store.py index b9bdf731..02df0e57 100644 --- a/ldclient/redis_feature_store.py +++ b/ldclient/redis_feature_store.py @@ -1,6 +1,11 @@ import json -import redis +have_redis = False +try: + import redis + have_redis = True +except ImportError: + pass from ldclient import log from ldclient.feature_store import CacheConfig @@ -21,7 +26,8 @@ def __init__(self, max_connections=16, expiration=15, capacity=1000): - + if not have_redis: + raise NotImplementedError("Cannot use Redis feature store because redis package is not installed") self.core = _RedisFeatureStoreCore(url, prefix, max_connections) # exposed for testing self._wrapper = CachingStoreWrapper(self.core, CacheConfig(expiration=expiration, capacity=capacity)) @@ -47,6 +53,7 @@ def initialized(self): class _RedisFeatureStoreCore(FeatureStoreCore): def __init__(self, url, prefix, max_connections): + self._prefix = prefix self._pool = redis.ConnectionPool.from_url(url=url, max_connections=max_connections) self.test_update_hook = None # exposed for testing diff --git a/test-requirements.txt b/test-requirements.txt index 413ef355..88cbbc2e 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,6 +1,7 @@ mock>=2.0.0 pytest>=2.8 redis>=2.10.5 +boto3>=1.9.71 coverage>=4.4 pytest-capturelog>=0.7 pytest-cov>=2.4.0 diff --git a/testing/test_feature_store.py b/testing/test_feature_store.py index 5716fa0e..003434b1 100644 --- a/testing/test_feature_store.py +++ b/testing/test_feature_store.py @@ -1,9 +1,12 @@ +import boto3 import json import pytest import redis +import time +from ldclient.dynamodb_feature_store import _DynamoDBFeatureStoreCore, _DynamoDBHelpers from ldclient.feature_store import CacheConfig, InMemoryFeatureStore -from ldclient.integrations import Redis +from ldclient.integrations import DynamoDB, Redis from ldclient.redis_feature_store import RedisFeatureStore from ldclient.versioned_data_kind import FEATURES @@ -16,38 +19,124 @@ def get_log_lines(caplog): return loglines -class TestFeatureStore: +class InMemoryTester(object): + def init_store(self): + return InMemoryFeatureStore() + + +class RedisTester(object): redis_host = 'localhost' redis_port = 6379 - def clear_redis_data(self): + def __init__(self, cache_config): + self._cache_config = cache_config + + def init_store(self): + self._clear_data() + return Redis.new_feature_store(caching=self._cache_config) + + def _clear_data(self): r = redis.StrictRedis(host=self.redis_host, port=self.redis_port, db=0) r.delete("launchdarkly:features") - def in_memory(self): - return InMemoryFeatureStore() - def redis_with_local_cache(self): - self.clear_redis_data() - return Redis.new_feature_store() - 
- def redis_no_local_cache(self): - self.clear_redis_data() - return Redis.new_feature_store(caching=CacheConfig.disabled()) - - def deprecated_redis_with_local_cache(self): - self.clear_redis_data() - return RedisFeatureStore() +class RedisWithDeprecatedConstructorTester(RedisTester): + def init_store(self): + self._clear_data() + return RedisFeatureStore(expiration=(30 if self._cache_config.enabled else 0)) + + +class DynamoDBTester(object): + table_name = 'LD_DYNAMODB_TEST_TABLE' + table_created = False + options = { 'endpoint_url': 'http://localhost:8000', 'region_name': 'us-east-1' } + + def __init__(self, cache_config): + self._cache_config = cache_config + + def init_store(self): + self._create_table() + self._clear_data() + return DynamoDB.new_feature_store(self.table_name, dynamodb_opts=self.options) + + def _create_table(self): + if self.table_created: + return + client = boto3.client('dynamodb', **self.options) + try: + client.describe_table(TableName=self.table_name) + self.table_created = True + return + except client.exceptions.ResourceNotFoundException: + pass + req = { + 'TableName': self.table_name, + 'KeySchema': [ + { + 'AttributeName': _DynamoDBFeatureStoreCore.PARTITION_KEY, + 'KeyType': 'HASH', + }, + { + 'AttributeName': _DynamoDBFeatureStoreCore.SORT_KEY, + 'KeyType': 'RANGE' + } + ], + 'AttributeDefinitions': [ + { + 'AttributeName': _DynamoDBFeatureStoreCore.PARTITION_KEY, + 'AttributeType': 'S' + }, + { + 'AttributeName': _DynamoDBFeatureStoreCore.SORT_KEY, + 'AttributeType': 'S' + } + ], + 'ProvisionedThroughput': { + 'ReadCapacityUnits': 1, + 'WriteCapacityUnits': 1 + } + } + client.create_table(**req) + while True: + try: + client.describe_table(TableName=self.table_name) + self.table_created = True + return + except client.exceptions.ResourceNotFoundException: + time.sleep(0.5) + + def _clear_data(self): + client = boto3.client('dynamodb', **self.options) + delete_requests = [] + req = { + 'TableName': self.table_name, + 'ConsistentRead': True, + 'ProjectionExpression': '#namespace, #key', + 'ExpressionAttributeNames': { + '#namespace': _DynamoDBFeatureStoreCore.PARTITION_KEY, + '#key': _DynamoDBFeatureStoreCore.SORT_KEY + } + } + for resp in client.get_paginator('scan').paginate(**req): + for item in resp['Items']: + delete_requests.append({ 'DeleteRequest': { 'Key': item } }) + _DynamoDBHelpers.batch_write_requests(client, self.table_name, delete_requests) - def deprecated_redis_no_local_cache(self): - self.clear_redis_data() - return RedisFeatureStore(expiration=0) - params = [in_memory, redis_with_local_cache, redis_no_local_cache] +class TestFeatureStore: + params = [ + InMemoryTester(), + RedisTester(CacheConfig.default()), + RedisTester(CacheConfig.disabled()), + RedisWithDeprecatedConstructorTester(CacheConfig.default()), + RedisWithDeprecatedConstructorTester(CacheConfig.disabled()), + DynamoDBTester(CacheConfig.default()), + DynamoDBTester(CacheConfig.disabled()) + ] @pytest.fixture(params=params) def store(self, request): - return request.param(self) + return request.param.init_store() @staticmethod def make_feature(key, ver): @@ -79,6 +168,9 @@ def base_initialized_store(self, store): }) return store + def test_not_initialized_before_init(self, store): + assert store.initialized is False + def test_initialized(self, store): store = self.base_initialized_store(store) assert store.initialized is True From 431dddf55ea9bdc16d1e15d680e519287ed14723 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 29 Dec 2018 15:25:52 -0800 Subject: [PATCH 019/190] 
add test credentials --- testing/test_feature_store.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/testing/test_feature_store.py b/testing/test_feature_store.py index 003434b1..229a0f40 100644 --- a/testing/test_feature_store.py +++ b/testing/test_feature_store.py @@ -49,7 +49,12 @@ def init_store(self): class DynamoDBTester(object): table_name = 'LD_DYNAMODB_TEST_TABLE' table_created = False - options = { 'endpoint_url': 'http://localhost:8000', 'region_name': 'us-east-1' } + options = { + 'aws_access_key_id': 'key', # not used by local DynamoDB, but still required + 'aws_secret_access_key': 'secret', + 'endpoint_url': 'http://localhost:8000', + 'region_name': 'us-east-1' + } def __init__(self, cache_config): self._cache_config = cache_config From 3aa5644edf5c5f65f201733c20bb21e924fd10ef Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 31 Dec 2018 11:34:53 -0800 Subject: [PATCH 020/190] link in comment --- ldclient/integrations.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ldclient/integrations.py b/ldclient/integrations.py index 80063389..6102d354 100644 --- a/ldclient/integrations.py +++ b/ldclient/integrations.py @@ -18,7 +18,7 @@ def new_feature_store(table_name, :param string table_name: The name of an existing DynamoDB table :param string prefix: An optional namespace prefix to be prepended to all Redis keys :param dict dynamodb_opts: Optional parameters for configuring the DynamoDB client, as defined in - the boto3 API + the boto3 API; see https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html#boto3.session.Session.client :param CacheConfig caching: Specifies whether local caching should be enabled and if so, sets the cache properties; defaults to `CacheConfig.default()` """ From bd00276f874d40d1a5d1f2c66e033cd99452f00c Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 31 Dec 2018 11:36:13 -0800 Subject: [PATCH 021/190] comment --- ldclient/redis_feature_store.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/ldclient/redis_feature_store.py b/ldclient/redis_feature_store.py index b9bdf731..e08af6dc 100644 --- a/ldclient/redis_feature_store.py +++ b/ldclient/redis_feature_store.py @@ -9,6 +9,11 @@ from ldclient.versioned_data_kind import FEATURES +# Note that this class is now just a facade around CachingStoreWrapper, which is in turn delegating +# to _RedisFeatureStoreCore where the actual database logic is. This class was retained for historical +# reasons, to support existing code that calls the RedisFeatureStore constructor. In the future, we +# will migrate away from exposing these concrete classes and use only the factory methods. + class RedisFeatureStore(FeatureStore): """A Redis-backed implementation of :class:`ldclient.feature_store.FeatureStore`. 
From 534ec5deadb46e318a18e7bc80431f2bc531a639 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 31 Dec 2018 12:48:27 -0800 Subject: [PATCH 022/190] don't catch exceptions in Redis feature store, let the client catch them --- ldclient/client.py | 15 +++++-- ldclient/feature_store_helpers.py | 6 +-- ldclient/interfaces.py | 4 +- ldclient/redis_feature_store.py | 19 ++------- testing/test_feature_store.py | 30 -------------- testing/test_feature_store_helpers.py | 59 +++++++++++++++++++++++---- testing/test_ldclient_evaluation.py | 56 +++++++++++++++++++++++++ 7 files changed, 128 insertions(+), 61 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index 039fad52..eea7d970 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -243,7 +243,14 @@ def send_event(value, variation=None, flag=None, reason=None): if user is not None and user.get('key', "") == "": log.warn("User key is blank. Flag evaluation will proceed, but the user will not be stored in LaunchDarkly.") - flag = self._store.get(FEATURES, key, lambda x: x) + try: + flag = self._store.get(FEATURES, key, lambda x: x) + except Exception as e: + log.error("Unexpected error while retrieving feature flag \"%s\": %s" % (key, repr(e))) + log.debug(traceback.format_exc()) + reason = error_reason('EXCEPTION') + send_event(default, None, None, reason) + return EvaluationDetail(default, None, reason) if not flag: reason = error_reason('FLAG_NOT_FOUND') send_event(default, None, None, reason) @@ -264,7 +271,7 @@ def send_event(value, variation=None, flag=None, reason=None): send_event(detail.value, detail.variation_index, flag, detail.reason) return detail except Exception as e: - log.error("Unexpected error while evaluating feature flag \"%s\": %s" % (key, e)) + log.error("Unexpected error while evaluating feature flag \"%s\": %s" % (key, repr(e))) log.debug(traceback.format_exc()) reason = error_reason('EXCEPTION') send_event(default, None, flag, reason) @@ -328,7 +335,7 @@ def all_flags_state(self, user, **kwargs): if flags_map is None: raise ValueError("feature store error") except Exception as e: - log.error("Unable to read flags for all_flag_state: %s" % e) + log.error("Unable to read flags for all_flag_state: %s" % repr(e)) return FeatureFlagsState(False) for key, flag in flags_map.items(): @@ -339,7 +346,7 @@ def all_flags_state(self, user, **kwargs): state.add_flag(flag, detail.value, detail.variation_index, detail.reason if with_reasons else None, details_only_if_tracked) except Exception as e: - log.error("Error evaluating flag \"%s\" in all_flags_state: %s" % (key, e)) + log.error("Error evaluating flag \"%s\" in all_flags_state: %s" % (key, repr(e))) log.debug(traceback.format_exc()) reason = {'kind': 'ERROR', 'errorKind': 'EXCEPTION'} state.add_flag(flag, None, None, reason if with_reasons else None, details_only_if_tracked) diff --git a/ldclient/feature_store_helpers.py b/ldclient/feature_store_helpers.py index d8359274..2ba83713 100644 --- a/ldclient/feature_store_helpers.py +++ b/ldclient/feature_store_helpers.py @@ -42,7 +42,7 @@ def get(self, kind, key, callback=lambda x: x): self._cache[cache_key] = [item] return callback(self._item_if_not_deleted(item)) - def all(self, kind, callback): + def all(self, kind, callback=lambda x: x): if self._cache is not None: cache_key = self._all_cache_key(kind) cached_items = self._cache.get(cache_key) @@ -68,11 +68,11 @@ def initialized(self): if self._inited: return True if self._cache is None: - result = self._core.initialized_internal() + result = 
bool(self._core.initialized_internal()) else: result = self._cache.get(CachingStoreWrapper.__INITED_CACHE_KEY__) if result is None: - result = self._core.initialized_internal() + result = bool(self._core.initialized_internal()) self._cache[CachingStoreWrapper.__INITED_CACHE_KEY__] = result if result: self._inited = True diff --git a/ldclient/interfaces.py b/ldclient/interfaces.py index 2710fa25..9556bdfc 100644 --- a/ldclient/interfaces.py +++ b/ldclient/interfaces.py @@ -19,7 +19,7 @@ class FeatureStore(object): __metaclass__ = ABCMeta @abstractmethod - def get(self, kind, key, callback): + def get(self, kind, key, callback=lambda x: x): """ Retrieves the object to which the specified key is mapped, or None if the key is not found or the associated object has a "deleted" property of True. The retrieved object, if any (a @@ -35,7 +35,7 @@ def get(self, kind, key, callback): """ @abstractmethod - def all(self, kind, callback): + def all(self, kind, callback=lambda x: x): """ Retrieves a dictionary of all associated objects of a given kind. The retrieved dict of keys to objects can be transformed by the specified callback. diff --git a/ldclient/redis_feature_store.py b/ldclient/redis_feature_store.py index e08af6dc..c3eabc42 100644 --- a/ldclient/redis_feature_store.py +++ b/ldclient/redis_feature_store.py @@ -77,16 +77,10 @@ def init_internal(self, all_data): def get_all_internal(self, kind): r = redis.Redis(connection_pool=self._pool) - try: - all_items = r.hgetall(self._items_key(kind)) - except BaseException as e: - log.error("RedisFeatureStore: Could not retrieve '%s' from Redis with error: %s. Returning None.", - kind.namespace, e) - return None + all_items = r.hgetall(self._items_key(kind)) if all_items is None or all_items is "": - log.warn("RedisFeatureStore: call to get all '%s' returned no results. Returning None.", kind.namespace) - return None + all_items = {} results = {} for key, item_json in all_items.items(): @@ -95,13 +89,8 @@ def get_all_internal(self, kind): return results def get_internal(self, kind, key): - try: - r = redis.Redis(connection_pool=self._pool) - item_json = r.hget(self._items_key(kind), key) - except BaseException as e: - log.error("RedisFeatureStore: Could not retrieve key %s from '%s' with error: %s", - key, kind.namespace, e) - return None + r = redis.Redis(connection_pool=self._pool) + item_json = r.hget(self._items_key(kind), key) if item_json is None or item_json is "": log.debug("RedisFeatureStore: key %s not found in '%s'. 
Returning None.", key, kind.namespace) diff --git a/testing/test_feature_store.py b/testing/test_feature_store.py index 5716fa0e..ffff39a8 100644 --- a/testing/test_feature_store.py +++ b/testing/test_feature_store.py @@ -8,14 +8,6 @@ from ldclient.versioned_data_kind import FEATURES -def get_log_lines(caplog): - loglines = caplog.records - if callable(loglines): - # records() is a function in older versions of the caplog plugin - loglines = loglines() - return loglines - - class TestFeatureStore: redis_host = 'localhost' redis_port = 6379 @@ -178,25 +170,3 @@ def hook(base_key, key): store.upsert(FEATURES, feature) result = store.get(FEATURES, 'flagkey', lambda x: x) assert result['version'] == 5 - - def test_exception_is_handled_in_get(self, caplog): - # This just verifies the fix for a bug that caused an error during exception handling in Python 3 - store = RedisFeatureStore(url='redis://bad') - feature = store.get(FEATURES, 'flagkey') - assert feature is None - loglines = get_log_lines(caplog) - assert len(loglines) == 2 - message = loglines[1].message - assert message.startswith("RedisFeatureStore: Could not retrieve key flagkey from 'features' with error:") - assert "connecting to bad:6379" in message - - def test_exception_is_handled_in_all(self, caplog): - # This just verifies the fix for a bug that caused an error during exception handling in Python 3 - store = RedisFeatureStore(url='redis://bad') - all = store.all(FEATURES, lambda x: x) - assert all == {} - loglines = get_log_lines(caplog) - assert len(loglines) == 2 - message = loglines[1].message - assert message.startswith("RedisFeatureStore: Could not retrieve 'features' from Redis") - assert "connecting to bad:6379" in message diff --git a/testing/test_feature_store_helpers.py b/testing/test_feature_store_helpers.py index 01bb245a..77ccb6f8 100644 --- a/testing/test_feature_store_helpers.py +++ b/testing/test_feature_store_helpers.py @@ -16,20 +16,25 @@ def __init__(self): self.data = {} self.inited = False self.inited_query_count = 0 + self.error = None def init_internal(self, all_data): + self._maybe_throw() self.data = {} for kind, items in all_data.items(): self.data[kind] = items.copy() def get_internal(self, kind, key): + self._maybe_throw() items = self.data.get(kind) return None if items is None else items.get(key) def get_all_internal(self, kind): + self._maybe_throw() return self.data.get(kind) def upsert_internal(self, kind, item): + self._maybe_throw() key = item.get('key') items = self.data.get(kind) if items is None: @@ -42,9 +47,14 @@ def upsert_internal(self, kind, item): return old_item def initialized_internal(self): + self._maybe_throw() self.inited_query_count = self.inited_query_count + 1 return self.inited - + + def _maybe_throw(self): + if self.error is not None: + raise self.error + def force_set(self, kind, item): items = self.data.get(kind) if items is None: @@ -57,6 +67,9 @@ def force_remove(self, kind, key): if items is not None: items.pop(key, None) +class CustomError(Exception): + pass + class TestCachingStoreWrapper: @pytest.mark.parametrize("cached", [False, True]) def test_get_item(self, cached): @@ -119,6 +132,14 @@ def test_cached_get_uses_values_from_init(self): core.force_remove(THINGS, item1["key"]) assert wrapper.get(THINGS, item1["key"]) == item1 + @pytest.mark.parametrize("cached", [False, True]) + def test_get_can_throw_exception(self, cached): + core = MockCore() + wrapper = make_wrapper(core, cached) + core.error = CustomError() + with pytest.raises(CustomError, message="expected 
exception"): + wrapper.get(THINGS, "key", lambda x: x) + @pytest.mark.parametrize("cached", [False, True]) def test_get_all(self, cached): core = MockCore() @@ -128,13 +149,13 @@ def test_get_all(self, cached): core.force_set(THINGS, item1) core.force_set(THINGS, item2) - assert wrapper.all(THINGS, lambda x: x) == { item1["key"]: item1, item2["key"]: item2 } + assert wrapper.all(THINGS) == { item1["key"]: item1, item2["key"]: item2 } core.force_remove(THINGS, item2["key"]) if cached: - assert wrapper.all(THINGS, lambda x: x) == { item1["key"]: item1, item2["key"]: item2 } + assert wrapper.all(THINGS) == { item1["key"]: item1, item2["key"]: item2 } else: - assert wrapper.all(THINGS, lambda x: x) == { item1["key"]: item1 } + assert wrapper.all(THINGS) == { item1["key"]: item1 } @pytest.mark.parametrize("cached", [False, True]) def test_get_all_removes_deleted_items(self, cached): @@ -145,14 +166,14 @@ def test_get_all_removes_deleted_items(self, cached): core.force_set(THINGS, item1) core.force_set(THINGS, item2) - assert wrapper.all(THINGS, lambda x: x) == { item1["key"]: item1 } + assert wrapper.all(THINGS) == { item1["key"]: item1 } @pytest.mark.parametrize("cached", [False, True]) def test_get_all_changes_None_to_empty_dict(self, cached): core = MockCore() wrapper = make_wrapper(core, cached) - assert wrapper.all(WRONG_THINGS, lambda x:x) == {} + assert wrapper.all(WRONG_THINGS) == {} @pytest.mark.parametrize("cached", [False, True]) def test_get_all_iwith_lambda(self, cached): @@ -176,7 +197,15 @@ def test_cached_get_all_uses_values_from_init(self): wrapper.init({ THINGS: both }) core.force_remove(THINGS, item1["key"]) - assert wrapper.all(THINGS, lambda x: x) == both + assert wrapper.all(THINGS) == both + + @pytest.mark.parametrize("cached", [False, True]) + def test_get_all_can_throw_exception(self, cached): + core = MockCore() + wrapper = make_wrapper(core, cached) + core.error = CustomError() + with pytest.raises(CustomError, message="expected exception"): + wrapper.all(THINGS) @pytest.mark.parametrize("cached", [False, True]) def test_upsert_successful(self, cached): @@ -221,6 +250,14 @@ def test_cached_upsert_unsuccessful(self): core.force_set(THINGS, itemv3) # bypasses cache so we can verify that itemv2 is in the cache assert wrapper.get(THINGS, key) == itemv2 + @pytest.mark.parametrize("cached", [False, True]) + def test_upsert_can_throw_exception(self, cached): + core = MockCore() + wrapper = make_wrapper(core, cached) + core.error = CustomError() + with pytest.raises(CustomError, message="expected exception"): + wrapper.upsert(THINGS, { "key": "x", "version": 1 }) + @pytest.mark.parametrize("cached", [False, True]) def test_delete(self, cached): core = MockCore() @@ -239,6 +276,14 @@ def test_delete(self, cached): core.force_set(THINGS, itemv3) # make a change that bypasses the cache assert wrapper.get(THINGS, key) == (None if cached else itemv3) + @pytest.mark.parametrize("cached", [False, True]) + def test_delete_can_throw_exception(self, cached): + core = MockCore() + wrapper = make_wrapper(core, cached) + core.error = CustomError() + with pytest.raises(CustomError, message="expected exception"): + wrapper.delete(THINGS, "x", 1) + def test_uncached_initialized_queries_state_only_until_inited(self): core = MockCore() wrapper = make_wrapper(core, False) diff --git a/testing/test_ldclient_evaluation.py b/testing/test_ldclient_evaluation.py index 46c48756..e48f0329 100644 --- a/testing/test_ldclient_evaluation.py +++ b/testing/test_ldclient_evaluation.py @@ -4,6 +4,7 @@ from 
ldclient.client import LDClient, Config from ldclient.feature_store import InMemoryFeatureStore from ldclient.flag import EvaluationDetail +from ldclient.interfaces import FeatureStore from ldclient.versioned_data_kind import FEATURES from testing.stub_util import MockEventProcessor, MockUpdateProcessor from testing.test_ldclient import make_off_flag_with_value @@ -28,6 +29,17 @@ 'debugEventsUntilDate': 1000 } +class ErroringFeatureStore(FeatureStore): + def get(self, kind, key, callback=lambda x: x): + raise NotImplementedError() + + def all(self, kind, callback=lambda x: x): + raise NotImplementedError() + + @property + def initialized(self): + return True + def make_client(store): return LDClient(config=Config(sdk_key='SDK_KEY', base_uri='http://test', @@ -35,6 +47,14 @@ def make_client(store): update_processor_class=MockUpdateProcessor, feature_store=store)) +def get_log_lines(caplog): + loglines = caplog.records + if callable(loglines): + # records() is a function in older versions of the caplog plugin + loglines = loglines() + return loglines + + def test_variation_for_existing_feature(): feature = make_off_flag_with_value('feature.key', 'value') store = InMemoryFeatureStore() @@ -116,6 +136,25 @@ def test_variation_detail_for_flag_that_evaluates_to_none(): assert expected == actual assert actual.is_default_value() == True +def test_variation_when_feature_store_throws_error(caplog): + store = ErroringFeatureStore() + client = make_client(store) + assert client.variation('feature.key', { "key": "user" }, default='default') == 'default' + loglines = get_log_lines(caplog) + assert len(loglines) == 1 + assert loglines[0].message == 'Unexpected error while retrieving feature flag "feature.key": NotImplementedError()' + +def test_variation_detail_when_feature_store_throws_error(caplog): + store = ErroringFeatureStore() + client = make_client(store) + expected = EvaluationDetail('default', None, {'kind': 'ERROR', 'errorKind': 'EXCEPTION'}) + actual = client.variation_detail('feature.key', { }, default='default') + assert expected == actual + assert actual.is_default_value() == True + loglines = get_log_lines(caplog) + assert len(loglines) == 1 + assert loglines[0].message == 'Unexpected error while retrieving feature flag "feature.key": NotImplementedError()' + def test_all_flags_returns_values(): store = InMemoryFeatureStore() store.init({ FEATURES: { 'key1': flag1, 'key2': flag2 } }) @@ -137,6 +176,14 @@ def test_all_flags_returns_none_if_user_has_no_key(): result = client.all_flags({ }) assert result is None +def test_all_flags_returns_none_if_feature_store_throws_error(caplog): + store = ErroringFeatureStore() + client = make_client(store) + assert client.all_flags({ "key": "user" }) is None + loglines = get_log_lines(caplog) + assert len(loglines) == 1 + assert loglines[0].message == 'Unable to read flags for all_flag_state: NotImplementedError()' + def test_all_flags_state_returns_state(): store = InMemoryFeatureStore() store.init({ FEATURES: { 'key1': flag1, 'key2': flag2 } }) @@ -297,3 +344,12 @@ def test_all_flags_state_returns_empty_state_if_user_has_no_key(): client = make_client(store) state = client.all_flags_state({ }) assert state.valid == False + +def test_all_flags_returns_empty_state_if_feature_store_throws_error(caplog): + store = ErroringFeatureStore() + client = make_client(store) + state = client.all_flags_state({ "key": "user" }) + assert state.valid == False + loglines = get_log_lines(caplog) + assert len(loglines) == 1 + assert loglines[0].message == 'Unable to read 
flags for all_flag_state: NotImplementedError()' From 5f16c8d31337ab03f4b925c5552074f6562d1b55 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 31 Dec 2018 12:48:35 -0800 Subject: [PATCH 023/190] gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 0d1700ee..d988c61f 100644 --- a/.gitignore +++ b/.gitignore @@ -44,6 +44,7 @@ nosetests.xml coverage.xml *,cover .hypothesis/ +.pytest_cache # Translations *.mo From ac0f2eae2fc64b9402b708e1cf418eb1d2ce320a Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 31 Dec 2018 13:02:54 -0800 Subject: [PATCH 024/190] misc test fixes --- testing/test_ldclient_evaluation.py | 35 ++++++++++++++++------------- 1 file changed, 20 insertions(+), 15 deletions(-) diff --git a/testing/test_ldclient_evaluation.py b/testing/test_ldclient_evaluation.py index e48f0329..be925a5c 100644 --- a/testing/test_ldclient_evaluation.py +++ b/testing/test_ldclient_evaluation.py @@ -36,6 +36,15 @@ def get(self, kind, key, callback=lambda x: x): def all(self, kind, callback=lambda x: x): raise NotImplementedError() + def upsert(self, kind, item): + pass + + def delete(self, key, version): + pass + + def init(self, data): + pass + @property def initialized(self): return True @@ -47,12 +56,12 @@ def make_client(store): update_processor_class=MockUpdateProcessor, feature_store=store)) -def get_log_lines(caplog): +def get_log_lines(caplog, level): loglines = caplog.records if callable(loglines): # records() is a function in older versions of the caplog plugin loglines = loglines() - return loglines + return [line.message for line in loglines if line.levelname == level] def test_variation_for_existing_feature(): @@ -140,20 +149,18 @@ def test_variation_when_feature_store_throws_error(caplog): store = ErroringFeatureStore() client = make_client(store) assert client.variation('feature.key', { "key": "user" }, default='default') == 'default' - loglines = get_log_lines(caplog) - assert len(loglines) == 1 - assert loglines[0].message == 'Unexpected error while retrieving feature flag "feature.key": NotImplementedError()' + errlog = get_log_lines(caplog, 'ERROR') + assert errlog == [ 'Unexpected error while retrieving feature flag "feature.key": NotImplementedError()' ] def test_variation_detail_when_feature_store_throws_error(caplog): store = ErroringFeatureStore() client = make_client(store) expected = EvaluationDetail('default', None, {'kind': 'ERROR', 'errorKind': 'EXCEPTION'}) - actual = client.variation_detail('feature.key', { }, default='default') + actual = client.variation_detail('feature.key', { "key": "user" }, default='default') assert expected == actual assert actual.is_default_value() == True - loglines = get_log_lines(caplog) - assert len(loglines) == 1 - assert loglines[0].message == 'Unexpected error while retrieving feature flag "feature.key": NotImplementedError()' + errlog = get_log_lines(caplog, 'ERROR') + assert errlog == [ 'Unexpected error while retrieving feature flag "feature.key": NotImplementedError()' ] def test_all_flags_returns_values(): store = InMemoryFeatureStore() @@ -180,9 +187,8 @@ def test_all_flags_returns_none_if_feature_store_throws_error(caplog): store = ErroringFeatureStore() client = make_client(store) assert client.all_flags({ "key": "user" }) is None - loglines = get_log_lines(caplog) - assert len(loglines) == 1 - assert loglines[0].message == 'Unable to read flags for all_flag_state: NotImplementedError()' + errlog = get_log_lines(caplog, 'ERROR') + assert errlog == [ 'Unable to read 
flags for all_flag_state: NotImplementedError()' ] def test_all_flags_state_returns_state(): store = InMemoryFeatureStore() @@ -350,6 +356,5 @@ def test_all_flags_returns_empty_state_if_feature_store_throws_error(caplog): client = make_client(store) state = client.all_flags_state({ "key": "user" }) assert state.valid == False - loglines = get_log_lines(caplog) - assert len(loglines) == 1 - assert loglines[0].message == 'Unable to read flags for all_flag_state: NotImplementedError()' + errlog = get_log_lines(caplog, 'ERROR') + assert errlog == [ 'Unable to read flags for all_flag_state: NotImplementedError()' ] From 256b6fb0ca3eb868f28526a209b194b06267d685 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 9 Jan 2019 12:57:28 -0800 Subject: [PATCH 025/190] implement dependency ordering for feature store data --- ldclient/client.py | 32 ++++++++++++++++++++- ldclient/feature_store.py | 51 ++++++++++++++++++++++++++++++++- ldclient/versioned_data_kind.py | 11 +++++-- testing/stub_util.py | 27 +++++++++++++++-- testing/test_ldclient.py | 35 ++++++++++++++++++++-- 5 files changed, 148 insertions(+), 8 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index eea7d970..3ce19d15 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -12,6 +12,7 @@ from ldclient.feature_requester import FeatureRequesterImpl from ldclient.flag import EvaluationDetail, evaluate, error_reason from ldclient.flags_state import FeatureFlagsState +from ldclient.interfaces import FeatureStore from ldclient.polling import PollingUpdateProcessor from ldclient.streaming import StreamingUpdateProcessor from ldclient.util import check_uwsgi, log @@ -27,6 +28,35 @@ from threading import Lock +class _FeatureStoreClientWrapper(FeatureStore): + """Provides additional behavior that the client requires before or after feature store operations. + Currently this just means sorting the data set for init(). In the future we may also use this + to provide an update listener capability. + """ + + def __init__(self, store): + self.store = store + + def get(self, kind, key, callback): + return self.store.get(self, kind, key, callback) + + def all(self, kind, callback): + return self.store.all(self, kind, callback) + + def init(self, all_data): + return self.store.init(self, all_data) + + def delete(self, kind, key, version): + return self.store.delete(self, kind, key, version) + + def upsert(self, kind, item): + return self.store.upsert(self, kind, item) + + @property + def initialized(self): + return self.store.initialized + + class LDClient(object): def __init__(self, sdk_key=None, config=None, start_wait=5): """Constructs a new LDClient instance. 
@@ -55,7 +85,7 @@ def __init__(self, sdk_key=None, config=None, start_wait=5): self._event_processor = None self._lock = Lock() - self._store = self._config.feature_store + self._store = _FeatureStoreClientWrapper(self._config.feature_store) """ :type: FeatureStore """ if self._config.offline or not self._config.send_events: diff --git a/ldclient/feature_store.py b/ldclient/feature_store.py index e4d2f667..07223a32 100644 --- a/ldclient/feature_store.py +++ b/ldclient/feature_store.py @@ -1,4 +1,4 @@ -from collections import defaultdict +from collections import OrderedDict, defaultdict from ldclient.util import log from ldclient.interfaces import FeatureStore from ldclient.rwlock import ReadWriteLock @@ -126,3 +126,52 @@ def initialized(self): return self._initialized finally: self._lock.runlock() + + +class _FeatureStoreDataSetSorter: + """ + Implements a dependency graph ordering for data to be stored in a feature store. We must use this + on every data set that will be passed to the feature store's init() method. + """ + @staticmethod + def sort_all_collections(all_data): + """ Returns a copy of the input data that has the following guarantees: the iteration order of the outer + dictionary will be in ascending order by the VersionDataKind's :priority property (if any), and for each + data kind that has a "get_dependency_keys" function, the inner dictionary will have an iteration order + where B is before A if A has a dependency on B. + """ + outer_hash = OrderedDict() + kinds = list(all_data.keys()) + def priority_order(kind): + return kind.get('priority', len(kind['namespace'])) # use arbitrary order if there's no priority + kinds.sort(key=priority_order) + for kind in kinds: + items = all_data[kind] + outer_hash[kind] = _FeatureStoreDataSetSorter._sort_collection(kind, items) + return outer_hash + + @staticmethod + def _sort_collection(kind, input): + if len(input) == 0 or not hasattr(kind, 'get_dependency_keys'): + return input + dependency_fn = kind.get_dependency_keys + if dependency_fn is None or len(input) == 0: + return input + remaining_items = input.copy() + items_out = OrderedDict() + while len(remaining_items) > 0: + # pick a random item that hasn't been updated yet + for key, item in remaining_items: + _FeatureStoreDataSetSorter._add_with_dependencies_first(item, dependency_fn, remaining_items, items_out) + break + return items_out + + @staticmethod + def _add_with_dependencies_first(item, dependency_fn, remaining_items, items_out): + key = item.get('key') + del remaining_items[key] # we won't need to visit this item again + for dep_key in dependency_fn(item): + dep_item = remaining_items.get(dep_key) + if dep_item is not None: + _FeatureStoreDataSetSorter._add_with_dependencies_first(dep_item, dependency_fn, remaining_items, items_out) + items_out[key] = item diff --git a/ldclient/versioned_data_kind.py b/ldclient/versioned_data_kind.py index 6df96a32..0054a42e 100644 --- a/ldclient/versioned_data_kind.py +++ b/ldclient/versioned_data_kind.py @@ -10,10 +10,17 @@ VersionedDataKind = namedtuple('VersionedDataKind', ['namespace', 'request_api_path', 'stream_api_path']) +VersionedDataKindWithOrdering = namedtuple('VersionedDataKindWithOrdering', + ['namespace', 'request_api_path', 'stream_api_path', 'priority', 'get_dependency_keys']) + FEATURES = VersionedDataKind(namespace = "features", request_api_path = "/sdk/latest-flags", - stream_api_path = "/flags/") + stream_api_path = "/flags/", + priority = 1, + get_dependency_keys = lambda flag: p.get('key') for p in 
flag.get('prerequisites', [])) SEGMENTS = VersionedDataKind(namespace = "segments", request_api_path = "/sdk/latest-segments", - stream_api_path = "/segments/") + stream_api_path = "/segments/", + priority = 0, + get_dependency_keys = None) diff --git a/testing/stub_util.py b/testing/stub_util.py index bcb45ef2..80e53af6 100644 --- a/testing/stub_util.py +++ b/testing/stub_util.py @@ -1,14 +1,13 @@ from email.utils import formatdate from requests.structures import CaseInsensitiveDict -from ldclient.interfaces import EventProcessor, FeatureRequester, UpdateProcessor +from ldclient.interfaces import EventProcessor, FeatureRequester, FeatureStore, UpdateProcessor class MockEventProcessor(EventProcessor): def __init__(self, *_): self._running = False self._events = [] - mock_event_processor = self def stop(self): self._running = False @@ -103,3 +102,27 @@ def is_alive(self): def initialized(self): return True + +class CapturingFeatureStore(FeatureStore): + def init(self, all_data): + self.data = all_data + + def get(self, kind, key, callback=lambda x: x): + pass + + def all(self, kind, callback=lambda x: x): + pass + + def delete(self, kind, key, version): + pass + + def upsert(self, kind, item): + pass + + @property + def initialized(self): + return True + + @property + def received_data(self): + return self.data diff --git a/testing/test_ldclient.py b/testing/test_ldclient.py index 1766386b..be290fda 100644 --- a/testing/test_ldclient.py +++ b/testing/test_ldclient.py @@ -2,10 +2,10 @@ from ldclient.client import LDClient, Config from ldclient.event_processor import NullEventProcessor from ldclient.feature_store import InMemoryFeatureStore -from ldclient.interfaces import FeatureRequester, FeatureStore, UpdateProcessor +from ldclient.interfaces import UpdateProcessor from ldclient.versioned_data_kind import FEATURES import pytest -from testing.stub_util import MockEventProcessor, MockUpdateProcessor +from testing.stub_util import CapturingFeatureStore, MockEventProcessor, MockUpdateProcessor from testing.sync_util import wait_until try: @@ -259,3 +259,34 @@ def test_event_for_existing_feature_with_no_user_key(): def test_secure_mode_hash(): user = {'key': 'Message'} assert offline_client.secure_mode_hash(user) == "aa747c502a898200f9e4fa21bac68136f886a0e27aec70ba06daf2e2a5cb5597" + + +dependency_ordering_test_data = { + FEATURES: { + + }, + SEGMENTS: { + + } +} + +class DependencyOrderingDataUpdateProcessor(UpdateProcessor): + def __init__(self, config, store, ready): + store.init(dependency_ordering_test_data) + ready.set() + + def start(self): + pass + + def initialized(self): + return True + + +def test_store_data_set_ordering(): + store = CapturingFeatureStore() + config = Config(sdk_key = 'SDK_KEY', send_events=False, feature_store=store, + update_processor_class=DependencyOrderingDataUpdateProcessor) + client = LDClient(config=config) + + data = store.received_data + From 289077c9761e1cba7d574732ccd7059fd2ca1ede Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 9 Jan 2019 13:23:49 -0800 Subject: [PATCH 026/190] fix incomplete implementation & test --- ldclient/client.py | 15 ++++++++------- ldclient/feature_store.py | 7 +++++-- ldclient/versioned_data_kind.py | 8 +++++--- testing/test_ldclient.py | 32 +++++++++++++++++++++++++++----- 4 files changed, 45 insertions(+), 17 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index 3ce19d15..30c37e53 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -10,6 +10,7 @@ from ldclient.config import Config as Config from 
ldclient.event_processor import NullEventProcessor from ldclient.feature_requester import FeatureRequesterImpl +from ldclient.feature_store import _FeatureStoreDataSetSorter from ldclient.flag import EvaluationDetail, evaluate, error_reason from ldclient.flags_state import FeatureFlagsState from ldclient.interfaces import FeatureStore @@ -37,20 +38,20 @@ class _FeatureStoreClientWrapper(FeatureStore): def __init__(self, store): self.store = store + def init(self, all_data): + return self.store.init(_FeatureStoreDataSetSorter.sort_all_collections(all_data)) + def get(self, kind, key, callback): - return self.store.get(self, kind, key, callback) + return self.store.get(kind, key, callback) def all(self, kind, callback): - return self.store.all(self, kind, callback) - - def init(self, all_data): - return self.store.init(self, all_data) + return self.store.all(kind, callback) def delete(self, kind, key, version): - return self.store.delete(self, kind, key, version) + return self.store.delete(kind, key, version) def upsert(self, kind, item): - return self.store.upsert(self, kind, item) + return self.store.upsert(kind, item) @property def initialized(self): diff --git a/ldclient/feature_store.py b/ldclient/feature_store.py index 07223a32..fccef5b5 100644 --- a/ldclient/feature_store.py +++ b/ldclient/feature_store.py @@ -2,6 +2,7 @@ from ldclient.util import log from ldclient.interfaces import FeatureStore from ldclient.rwlock import ReadWriteLock +from six import iteritems class CacheConfig: @@ -143,7 +144,9 @@ def sort_all_collections(all_data): outer_hash = OrderedDict() kinds = list(all_data.keys()) def priority_order(kind): - return kind.get('priority', len(kind['namespace'])) # use arbitrary order if there's no priority + if hasattr(kind, 'priority'): + return kind.priority + return len(kind.namespace) # use arbitrary order if there's no priority kinds.sort(key=priority_order) for kind in kinds: items = all_data[kind] @@ -161,7 +164,7 @@ def _sort_collection(kind, input): items_out = OrderedDict() while len(remaining_items) > 0: # pick a random item that hasn't been updated yet - for key, item in remaining_items: + for key, item in iteritems(remaining_items): _FeatureStoreDataSetSorter._add_with_dependencies_first(item, dependency_fn, remaining_items, items_out) break return items_out diff --git a/ldclient/versioned_data_kind.py b/ldclient/versioned_data_kind.py index 0054a42e..04acce43 100644 --- a/ldclient/versioned_data_kind.py +++ b/ldclient/versioned_data_kind.py @@ -7,19 +7,21 @@ to add a corresponding constant here and the existing store should be able to handle it. 
""" +# Note that VersionedDataKind without the extra attributes is no longer used in the SDK, +# but it's preserved here for backward compatibility just in case someone else used it VersionedDataKind = namedtuple('VersionedDataKind', ['namespace', 'request_api_path', 'stream_api_path']) VersionedDataKindWithOrdering = namedtuple('VersionedDataKindWithOrdering', ['namespace', 'request_api_path', 'stream_api_path', 'priority', 'get_dependency_keys']) -FEATURES = VersionedDataKind(namespace = "features", +FEATURES = VersionedDataKindWithOrdering(namespace = "features", request_api_path = "/sdk/latest-flags", stream_api_path = "/flags/", priority = 1, - get_dependency_keys = lambda flag: p.get('key') for p in flag.get('prerequisites', [])) + get_dependency_keys = lambda flag: (p.get('key') for p in flag.get('prerequisites', []))) -SEGMENTS = VersionedDataKind(namespace = "segments", +SEGMENTS = VersionedDataKindWithOrdering(namespace = "segments", request_api_path = "/sdk/latest-segments", stream_api_path = "/segments/", priority = 0, diff --git a/testing/test_ldclient.py b/testing/test_ldclient.py index be290fda..4e5dc2f1 100644 --- a/testing/test_ldclient.py +++ b/testing/test_ldclient.py @@ -3,7 +3,7 @@ from ldclient.event_processor import NullEventProcessor from ldclient.feature_store import InMemoryFeatureStore from ldclient.interfaces import UpdateProcessor -from ldclient.versioned_data_kind import FEATURES +from ldclient.versioned_data_kind import FEATURES, SEGMENTS import pytest from testing.stub_util import CapturingFeatureStore, MockEventProcessor, MockUpdateProcessor from testing.sync_util import wait_until @@ -263,10 +263,15 @@ def test_secure_mode_hash(): dependency_ordering_test_data = { FEATURES: { - + "a": { "key": "a", "prerequisites": [ { "key": "b" }, { "key": "c" } ] }, + "b": { "key": "b", "prerequisites": [ { "key": "c" }, { "key": "e" } ] }, + "c": { "key": "c" }, + "d": { "key": "d" }, + "e": { "key": "e" }, + "f": { "key": "f" } }, SEGMENTS: { - + "o": { "key": "o" } } } @@ -286,7 +291,24 @@ def test_store_data_set_ordering(): store = CapturingFeatureStore() config = Config(sdk_key = 'SDK_KEY', send_events=False, feature_store=store, update_processor_class=DependencyOrderingDataUpdateProcessor) - client = LDClient(config=config) + LDClient(config=config) data = store.received_data - + assert data is not None + assert len(data) == 2 + + assert data.keys()[0] == SEGMENTS + assert len(data.values()[0]) == len(dependency_ordering_test_data[SEGMENTS]) + + assert data.keys()[1] == FEATURES + flags_map = data.values()[1] + flags_list = flags_map.values() + assert len(flags_list) == len(dependency_ordering_test_data[FEATURES]) + for item_index, item in enumerate(flags_list): + for prereq in item.get("prerequisites", []): + prereq_item = flags_map[prereq["key"]] + prereq_index = flags_list.index(prereq_item) + if prereq_index > item_index: + all_keys = (f["key"] for f in flags_list) + raise Exception("%s depends on %s, but %s was listed first; keys in order are [%s]" % + (item["key"], prereq["key"], item["key"], ", ".join(all_keys))) From 2c5929497d015c1377d409124173e8b5c88cb7f9 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 9 Jan 2019 13:31:23 -0800 Subject: [PATCH 027/190] Python 3.x fix --- testing/test_ldclient.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/testing/test_ldclient.py b/testing/test_ldclient.py index 4e5dc2f1..a31d2324 100644 --- a/testing/test_ldclient.py +++ b/testing/test_ldclient.py @@ -296,13 +296,15 @@ def 
test_store_data_set_ordering(): data = store.received_data assert data is not None assert len(data) == 2 + keys = list(data.keys()) + values = list(data.values()) - assert data.keys()[0] == SEGMENTS - assert len(data.values()[0]) == len(dependency_ordering_test_data[SEGMENTS]) + assert keys[0] == SEGMENTS + assert len(values[0]) == len(dependency_ordering_test_data[SEGMENTS]) - assert data.keys()[1] == FEATURES - flags_map = data.values()[1] - flags_list = flags_map.values() + assert keys[1] == FEATURES + flags_map = values[1] + flags_list = list(flags_map.values()) assert len(flags_list) == len(dependency_ordering_test_data[FEATURES]) for item_index, item in enumerate(flags_list): for prereq in item.get("prerequisites", []): From 78b611865e82278339e8fed4a3fe84ee24b24466 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 15 Jan 2019 16:04:39 -0800 Subject: [PATCH 028/190] minor doc fixes --- README.md | 14 ++++++++++---- ldclient/integrations.py | 19 ++++++++++++++++++- 2 files changed, 28 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index edef13e6..d25ee307 100644 --- a/README.md +++ b/README.md @@ -52,7 +52,6 @@ Or it can be set from within python: os.environ["https_proxy"] = "https://web-proxy.domain.com:8080" ``` - If your proxy requires authentication then you can prefix the URN with your login information: ``` export HTTPS_PROXY=http://user:pass@web-proxy.domain.com:8080 @@ -75,12 +74,19 @@ Your first feature flag # the code to run if the feature is off Supported Python versions ----------- +------------------------- + The SDK is tested with the most recent patch releases of Python 2.7, 3.3, 3.4, 3.5, and 3.6. Python 2.6 is no longer supported. +Database integrations +--------------------- + +Feature flag data can be kept in a persistent store using Redis or DynamoDB. These adapters are implemented in the `DynamoDB` and `Redis` classes in `ldclient.integrations`; to use them, call the `new_feature_store` method in the appropriate class, and put the returned object in the `feature_store` property of your client configuration. See [`ldclient.integrations`](https://github.com/launchdarkly/python-client-private/blob/master/ldclient/integrations.py) and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store) for more information. + Using flag data from a file --------------------------- -For testing purposes, the SDK can be made to read feature flag state from a file or files instead of connecting to LaunchDarkly. See [`file_data_source.py`](https://github.com/launchdarkly/python-client/blob/master/ldclient/file_data_source.py) for more details. + +For testing purposes, the SDK can be made to read feature flag state from a file or files instead of connecting to LaunchDarkly. See [`file_data_source.py`](https://github.com/launchdarkly/python-client/blob/master/ldclient/file_data_source.py) and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/reading-flags-from-a-file) for more details. Learn more ----------- @@ -100,7 +106,7 @@ Contributing See [CONTRIBUTING](CONTRIBUTING.md) for more information. About LaunchDarkly ------------ +------------------ * LaunchDarkly is a continuous delivery platform that provides feature flags as a service and allows developers to iterate quickly and safely. We allow you to easily flag your features and manage them from the LaunchDarkly dashboard. 
With LaunchDarkly, you can: * Roll out a new feature to a subset of your users (like a group of users who opt-in to a beta tester group), gathering feedback and bug reports from real-world use cases. diff --git a/ldclient/integrations.py b/ldclient/integrations.py index 6102d354..63c01202 100644 --- a/ldclient/integrations.py +++ b/ldclient/integrations.py @@ -15,8 +15,21 @@ def new_feature_store(table_name, caching=CacheConfig.default()): """Creates a DynamoDB-backed implementation of `:class:ldclient.feature_store.FeatureStore`. + To use this method, you must first install the `boto3` package containing the AWS SDK gems. + Then, put the object returned by this method into the `feature_store` property of your + client configuration (:class:ldclient.config.Config). + + Note that the DynamoDB table must already exist; the LaunchDarkly SDK does not create the table + automatically, because it has no way of knowing what additional properties (such as permissions + and throughput) you would want it to have. The table must have a partition key called + "namespace" and a sort key called "key", both with a string type. + + By default, the DynamoDB client will try to get your AWS credentials and region name from + environment variables and/or local configuration files, as described in the AWS SDK documentation. + You may also pass configuration settings in `dynamodb_opts`. + :param string table_name: The name of an existing DynamoDB table - :param string prefix: An optional namespace prefix to be prepended to all Redis keys + :param string prefix: An optional namespace prefix to be prepended to all DynamoDB keys :param dict dynamodb_opts: Optional parameters for configuring the DynamoDB client, as defined in the boto3 API; see https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html#boto3.session.Session.client :param CacheConfig caching: Specifies whether local caching should be enabled and if so, @@ -40,6 +53,10 @@ def new_feature_store(url='redis://localhost:6379/0', caching=CacheConfig.default()): """Creates a Redis-backed implementation of `:class:ldclient.feature_store.FeatureStore`. + To use this method, you must first install the `redis` package. Then, put the object + returned by this method into the `feature_store` property of your client configuration + (:class:ldclient.config.Config). 
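+
+        For example (the connection values shown are illustrative)::
+
+            store = Redis.new_feature_store(url='redis://localhost:6379/0',
+                                            caching=CacheConfig.default())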
+ :param string url: The URL of the Redis host; defaults to `DEFAULT_URL` :param string prefix: A namespace prefix to be prepended to all Redis keys; defaults to `DEFAULT_PREFIX` From 3eb821c483dfe9ae5a8d6b6d62a717bc6d32fc5b Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 18 Jan 2019 14:12:33 -0800 Subject: [PATCH 029/190] feature store test improvements --- ldclient/redis_feature_store.py | 4 +-- testing/test_feature_store.py | 47 ++++++++++++++++++++++++++++----- 2 files changed, 42 insertions(+), 9 deletions(-) diff --git a/ldclient/redis_feature_store.py b/ldclient/redis_feature_store.py index 27139567..16302212 100644 --- a/ldclient/redis_feature_store.py +++ b/ldclient/redis_feature_store.py @@ -59,10 +59,10 @@ def initialized(self): class _RedisFeatureStoreCore(FeatureStoreCore): def __init__(self, url, prefix, max_connections): - self._prefix = prefix + self._prefix = prefix or 'launchdarkly' self._pool = redis.ConnectionPool.from_url(url=url, max_connections=max_connections) self.test_update_hook = None # exposed for testing - log.info("Started RedisFeatureStore connected to URL: " + url + " using prefix: " + prefix) + log.info("Started RedisFeatureStore connected to URL: " + url + " using prefix: " + self._prefix) def _items_key(self, kind): return "{0}:{1}".format(self._prefix, kind.namespace) diff --git a/testing/test_feature_store.py b/testing/test_feature_store.py index 8ab8c422..f6912ff3 100644 --- a/testing/test_feature_store.py +++ b/testing/test_feature_store.py @@ -15,6 +15,10 @@ class InMemoryTester(object): def init_store(self): return InMemoryFeatureStore() + @property + def supports_prefix(self): + return False + class RedisTester(object): redis_host = 'localhost' @@ -23,19 +27,27 @@ class RedisTester(object): def __init__(self, cache_config): self._cache_config = cache_config - def init_store(self): + def init_store(self, prefix=None): self._clear_data() - return Redis.new_feature_store(caching=self._cache_config) + return Redis.new_feature_store(caching=self._cache_config, prefix=prefix) + + @property + def supports_prefix(self): + return True def _clear_data(self): r = redis.StrictRedis(host=self.redis_host, port=self.redis_port, db=0) - r.delete("launchdarkly:features") + r.flushdb() class RedisWithDeprecatedConstructorTester(RedisTester): - def init_store(self): + def init_store(self, prefix=None): self._clear_data() - return RedisFeatureStore(expiration=(30 if self._cache_config.enabled else 0)) + return RedisFeatureStore(expiration=(30 if self._cache_config.enabled else 0), prefix=prefix) + + @property + def supports_prefix(self): + return True class DynamoDBTester(object): @@ -51,10 +63,14 @@ class DynamoDBTester(object): def __init__(self, cache_config): self._cache_config = cache_config - def init_store(self): + def init_store(self, prefix=None): self._create_table() self._clear_data() - return DynamoDB.new_feature_store(self.table_name, dynamodb_opts=self.options) + return DynamoDB.new_feature_store(self.table_name, prefix=prefix, dynamodb_opts=self.options) + + @property + def supports_prefix(self): + return True def _create_table(self): if self.table_created: @@ -131,6 +147,10 @@ class TestFeatureStore: DynamoDBTester(CacheConfig.disabled()) ] + @pytest.fixture(params=params) + def tester(self, request): + return request.param + @pytest.fixture(params=params) def store(self, request): return request.param.init_store() @@ -230,6 +250,19 @@ def test_upsert_older_version_after_delete(self, store): store.upsert(FEATURES, old_ver) assert 
store.get(FEATURES, 'foo', lambda x: x) is None + def test_stores_with_different_prefixes_are_independent(self, tester): + if not tester.supports_prefix: + return + store_a = tester.init_store('a') + store_b = tester.init_store('b') + flag = { 'key': 'flag', 'version': 1 } + store_a.init({ FEATURES: { flag['key']: flag } }) + store_b.init({ FEATURES: { } }) + item = store_a.get(FEATURES, flag['key'], lambda x: x) + assert item == flag + item = store_b.get(FEATURES, flag['key'], lambda x: x) + assert item is None + class TestRedisFeatureStoreExtraTests: def test_upsert_race_condition_against_external_client_with_higher_version(self): From cc938e33221b35daf612b10c881e87c5b5b60056 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 18 Jan 2019 16:14:39 -0800 Subject: [PATCH 030/190] better database prefix test --- testing/test_feature_store.py | 31 +++++++++++++++++++++++++------ 1 file changed, 25 insertions(+), 6 deletions(-) diff --git a/testing/test_feature_store.py b/testing/test_feature_store.py index f6912ff3..6c0f0c5e 100644 --- a/testing/test_feature_store.py +++ b/testing/test_feature_store.py @@ -251,17 +251,36 @@ def test_upsert_older_version_after_delete(self, store): assert store.get(FEATURES, 'foo', lambda x: x) is None def test_stores_with_different_prefixes_are_independent(self, tester): + # This verifies that init, get, and upsert are all correctly using the specified key prefix. if not tester.supports_prefix: return + + flag_a1 = { 'key': 'flagA1', 'version': 1 } + flag_a2 = { 'key': 'flagA2', 'version': 1 } + flag_b1 = { 'key': 'flagB1', 'version': 1 } + flag_b2 = { 'key': 'flagB2', 'version': 1 } store_a = tester.init_store('a') store_b = tester.init_store('b') - flag = { 'key': 'flag', 'version': 1 } - store_a.init({ FEATURES: { flag['key']: flag } }) - store_b.init({ FEATURES: { } }) - item = store_a.get(FEATURES, flag['key'], lambda x: x) - assert item == flag - item = store_b.get(FEATURES, flag['key'], lambda x: x) + + store_a.init({ FEATURES: { 'flagA1': flag_a1 } }) + store_a.upsert(FEATURES, flag_a2) + + store_b.init({ FEATURES: { 'flagB1': flag_b1 } }) + store_b.upsert(FEATURES, flag_b2) + + item = store_a.get(FEATURES, 'flagA1', lambda x: x) + assert item == flag_a1 + item = store_a.get(FEATURES, 'flagB1', lambda x: x) + assert item is None + items = store_a.all(FEATURES, lambda x: x) + assert items == { 'flagA1': flag_a1, 'flagA2': flag_a2 } + + item = store_b.get(FEATURES, 'flagB1', lambda x: x) + assert item == flag_b1 + item = store_b.get(FEATURES, 'flagA1', lambda x: x) assert item is None + items = store_b.all(FEATURES, lambda x: x) + assert items == { 'flagB1': flag_b1, 'flagB2': flag_b2 } class TestRedisFeatureStoreExtraTests: From 5b8b33745521e5909d01fa2982a66b4b28901cb7 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 18 Jan 2019 16:20:32 -0800 Subject: [PATCH 031/190] clarify comment --- testing/test_feature_store.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/testing/test_feature_store.py b/testing/test_feature_store.py index 6c0f0c5e..35a2ef6e 100644 --- a/testing/test_feature_store.py +++ b/testing/test_feature_store.py @@ -251,7 +251,8 @@ def test_upsert_older_version_after_delete(self, store): assert store.get(FEATURES, 'foo', lambda x: x) is None def test_stores_with_different_prefixes_are_independent(self, tester): - # This verifies that init, get, and upsert are all correctly using the specified key prefix. + # This verifies that init(), get(), all(), and upsert() are all correctly using the specified key prefix. 
+ # The delete() method isn't tested separately because it's implemented as a variant of upsert(). if not tester.supports_prefix: return From f9ce243f9e6e49dadae858fd2bfc654f41b56f7c Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 18 Jan 2019 17:47:16 -0800 Subject: [PATCH 032/190] add Consul feature store integration --- .circleci/config.yml | 11 +- consul-requirements.txt | 1 + ldclient/impl/__init__.py | 0 ldclient/impl/integrations/__init__.py | 0 ldclient/impl/integrations/consul/__init__.py | 0 .../impl/integrations/consul/feature_store.py | 125 ++++++++++++++++++ ldclient/integrations.py | 36 +++++ testing/test_feature_store.py | 37 +++++- 8 files changed, 207 insertions(+), 3 deletions(-) create mode 100644 consul-requirements.txt create mode 100644 ldclient/impl/__init__.py create mode 100644 ldclient/impl/integrations/__init__.py create mode 100644 ldclient/impl/integrations/consul/__init__.py create mode 100644 ldclient/impl/integrations/consul/feature_store.py diff --git a/.circleci/config.yml b/.circleci/config.yml index 92699a3c..5c83ba64 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -17,13 +17,16 @@ test-template: &test-template command: | sudo pip install --upgrade pip setuptools; sudo pip install -r test-requirements.txt; + if [[ "$CIRCLE_JOB != test-3.3" ]] && [[ "$CIRCLE_JOB" != "test-3.4" ]]; then + sudo pip install -r consul-requirements.text; + fi; sudo python setup.py install; pip freeze - run: name: run tests command: | mkdir test-reports; - if [[ $CIRCLE_JOB == test-2.7 ]]; then + if [[ "$CIRCLE_JOB" == "test-2.7" ]]; then pytest -s --cov=ldclient --junitxml=test-reports/junit.xml testing; sh -c '[ -n "${CODECLIMATE_REPO_TOKEN+1}" ] && codeclimate-test-reporter || echo "No CODECLIMATE_REPO_TOKEN value is set; not publishing coverage report"'; else @@ -41,33 +44,39 @@ jobs: - image: circleci/python:2.7-jessie - image: redis - image: amazon/dynamodb-local + - image: consul test-3.3: <<: *test-template docker: - image: circleci/python:3.3-jessie - image: redis - image: amazon/dynamodb-local + # python-consul doesn't support Python 3.3 test-3.4: <<: *test-template docker: - image: circleci/python:3.4-jessie - image: redis - image: amazon/dynamodb-local + # python-consul doesn't support Python 3.4 test-3.5: <<: *test-template docker: - image: circleci/python:3.5-jessie - image: redis - image: amazon/dynamodb-local + - image: consul test-3.6: <<: *test-template docker: - image: circleci/python:3.6-jessie - image: redis - image: amazon/dynamodb-local + - image: consul test-3.7: <<: *test-template docker: - image: circleci/python:3.7-stretch - image: redis - image: amazon/dynamodb-local + - image: consul diff --git a/consul-requirements.txt b/consul-requirements.txt new file mode 100644 index 00000000..637f7fe1 --- /dev/null +++ b/consul-requirements.txt @@ -0,0 +1 @@ +python-consul>=1.0.1 diff --git a/ldclient/impl/__init__.py b/ldclient/impl/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ldclient/impl/integrations/__init__.py b/ldclient/impl/integrations/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ldclient/impl/integrations/consul/__init__.py b/ldclient/impl/integrations/consul/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ldclient/impl/integrations/consul/feature_store.py b/ldclient/impl/integrations/consul/feature_store.py new file mode 100644 index 00000000..5fe2d8ad --- /dev/null +++ b/ldclient/impl/integrations/consul/feature_store.py @@ -0,0 +1,125 @@ +import json + 
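+# python-consul is an optional dependency: import it defensively so the SDK can
+# be installed and used without it. _ConsulFeatureStoreCore raises at
+# construction time if the package turns out to be missing.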
+have_consul = False +try: + import consul + have_consul = True +except ImportError: + pass + +from ldclient import log +from ldclient.feature_store import CacheConfig +from ldclient.feature_store_helpers import CachingStoreWrapper +from ldclient.interfaces import FeatureStore, FeatureStoreCore + +# +# Internal implementation of the Consul feature store. +# +# Implementation notes: +# +# * Feature flags, segments, and any other kind of entity the LaunchDarkly client may wish +# to store, are stored as individual items with the key "{prefix}/features/{flag-key}", +# "{prefix}/segments/{segment-key}", etc. +# +# * The special key "{prefix}/$inited" indicates that the store contains a complete data set. +# +# * Since Consul has limited support for transactions (they can't contain more than 64 +# operations), the init method-- which replaces the entire data store-- is not guaranteed to +# be atomic, so there can be a race condition if another process is adding new data via +# Upsert. To minimize this, we don't delete all the data at the start; instead, we update +# the items we've received, and then delete all other items. That could potentially result in +# deleting new data from another process, but that would be the case anyway if the Init +# happened to execute later than the Upsert; we are relying on the fact that normally the +# process that did the Init will also receive the new data shortly and do its own Upsert. +# + +class _ConsulFeatureStoreCore(FeatureStoreCore): + def __init__(self, host, port, prefix, consul_opts): + if not have_consul: + raise NotImplementedError("Cannot use Consul feature store because the python-consul package is not installed") + opts = consul_opts or {} + if host is not None: + opts['host'] = host + if port is not None: + opts['port'] = port + self._prefix = ("launchdarkly" if prefix is None else prefix) + "/" + self._client = consul.Consul(**opts) + + def init_internal(self, all_data): + # Start by reading the existing keys; we will later delete any of these that weren't in all_data. + index, keys = self._client.kv.get(self._prefix, recurse=True, keys=True) + unused_old_keys = set(keys or []) + + num_items = 0 + inited_key = self._inited_key() + unused_old_keys.discard(inited_key) + + # Insert or update every provided item. Note that this Consul client doesn't support batch + # operations (the "txn" method), so we'll write them one at a time. 
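+        # For example, with the default prefix a flag "f1" is written under the
+        # key "launchdarkly/features/f1" (see _item_key below).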
+ for kind, items in all_data.items(): + for key, item in items.items(): + encoded_item = json.dumps(item) + db_key = self._item_key(kind, item['key']) + self._client.kv.put(db_key, encoded_item) + unused_old_keys.discard(db_key) + num_items = num_items + 1 + + # Now delete any previously existing items whose keys were not in the current data + for key in unused_old_keys: + self._client.kv.delete(key) + + # Now set the special key that we check in initialized_internal() + self._client.kv.put(inited_key, "") + + log.info('Initialized Consul store with %d items', num_items) + + def get_internal(self, kind, key): + index, resp = self._client.kv.get(self._item_key(kind, key)) + return None if resp is None else json.loads(resp['Value']) + + def get_all_internal(self, kind): + items_out = {} + index, results = self._client.kv.get(self._kind_key(kind), recurse=True) + for result in results: + item = json.loads(result['Value']) + items_out[item['key']] = item + return items_out + + def upsert_internal(self, kind, new_item): + key = self._item_key(kind, new_item['key']) + encoded_item = json.dumps(new_item) + + # We will potentially keep retrying indefinitely until someone's write succeeds + while True: + index, old_value = self._client.kv.get(key) + if old_value is None: + mod_index = 0 + else: + old_item = json.loads(old_value['Value']) + # Check whether the item is stale. If so, don't do the update (and return the existing item to + # CachingStoreWrapper so it can be cached) + if old_item['version'] >= new_item['version']: + return old_item + mod_index = old_value['ModifyIndex'] + + # Otherwise, try to write. We will do a compare-and-set operation, so the write will only succeed if + # the key's ModifyIndex is still equal to the previous value. If the previous ModifyIndex was zero, + # it means the key did not previously exist and the write will only succeed if it still doesn't exist. + success = self._client.kv.put(key, encoded_item, cas=mod_index) + if success: + return new_item + + log.debug('Concurrent modification detected, retrying') + + def initialized_internal(self): + index, resp = self._client.kv.get(self._inited_key()) + return (resp is not None) + + def _kind_key(self, kind): + return self._prefix + kind.namespace + + def _item_key(self, kind, key): + return self._kind_key(kind) + '/' + key + + def _inited_key(self): + return self._prefix + ('$inited') diff --git a/ldclient/integrations.py b/ldclient/integrations.py index 63c01202..aa74da1e 100644 --- a/ldclient/integrations.py +++ b/ldclient/integrations.py @@ -1,9 +1,41 @@ from ldclient.feature_store import CacheConfig from ldclient.feature_store_helpers import CachingStoreWrapper +from ldclient.impl.integrations.consul.feature_store import _ConsulFeatureStoreCore from ldclient.dynamodb_feature_store import _DynamoDBFeatureStoreCore from ldclient.redis_feature_store import _RedisFeatureStoreCore +class Consul(object): + """Provides factory methods for integrations between the LaunchDarkly SDK and Consul. + """ + + @staticmethod + def new_feature_store(host=None, + port=None, + prefix=None, + consul_opts=None, + caching=CacheConfig.default()): + """Creates a Consul-backed implementation of `:class:ldclient.feature_store.FeatureStore`. + For more details about how and why you can use a persistent feature store, see the + SDK reference guide: https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store + + To use this method, you must first install the `python-consul` package. 
Then, put the object + returned by this method into the `feature_store` property of your client configuration + (:class:ldclient.config.Config). + + :param string host: Hostname of the Consul server (uses "localhost" if omitted) + :param int port: Port of the Consul server (uses 8500 if omitted) + :param string prefix: An optional namespace prefix to be prepended to all Consul keys + :param dict consul_opts: Optional parameters for configuring the Consul client, if you need + to set any of them besides host and port, as defined in the python-consul API; see + https://python-consul.readthedocs.io/en/latest/#consul + :param CacheConfig caching: Specifies whether local caching should be enabled and if so, + sets the cache properties; defaults to `CacheConfig.default()` + """ + core = _ConsulFeatureStoreCore(host, port, prefix, consul_opts) + return CachingStoreWrapper(core, caching) + + class DynamoDB(object): """Provides factory methods for integrations between the LaunchDarkly SDK and DynamoDB. """ @@ -14,6 +46,8 @@ def new_feature_store(table_name, dynamodb_opts={}, caching=CacheConfig.default()): """Creates a DynamoDB-backed implementation of `:class:ldclient.feature_store.FeatureStore`. + For more details about how and why you can use a persistent feature store, see the + SDK reference guide: https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store To use this method, you must first install the `boto3` package containing the AWS SDK gems. Then, put the object returned by this method into the `feature_store` property of your @@ -52,6 +86,8 @@ def new_feature_store(url='redis://localhost:6379/0', max_connections=16, caching=CacheConfig.default()): """Creates a Redis-backed implementation of `:class:ldclient.feature_store.FeatureStore`. + For more details about how and why you can use a persistent feature store, see the + SDK reference guide: https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store To use this method, you must first install the `redis` package. 
Then, put the object returned by this method into the `feature_store` property of your client configuration diff --git a/testing/test_feature_store.py b/testing/test_feature_store.py index 35a2ef6e..76a7f41e 100644 --- a/testing/test_feature_store.py +++ b/testing/test_feature_store.py @@ -1,12 +1,21 @@ import boto3 +import consul import json import pytest import redis import time +# Consul is only supported in some Python versions +have_consul = False +try: + import consul + have_consul = True +except ImportError: + pass + from ldclient.dynamodb_feature_store import _DynamoDBFeatureStoreCore, _DynamoDBHelpers from ldclient.feature_store import CacheConfig, InMemoryFeatureStore -from ldclient.integrations import DynamoDB, Redis +from ldclient.integrations import Consul, DynamoDB, Redis from ldclient.redis_feature_store import RedisFeatureStore from ldclient.versioned_data_kind import FEATURES @@ -50,6 +59,25 @@ def supports_prefix(self): return True +class ConsulTester(object): + def __init__(self, cache_config): + self._cache_config = cache_config + + def init_store(self, prefix=None): + self._clear_data(prefix or "launchdarkly") + return Consul.new_feature_store(prefix=prefix, caching=self._cache_config) + + @property + def supports_prefix(self): + return True + + def _clear_data(self, prefix): + client = consul.Consul() + index, keys = client.kv.get(prefix + "/", recurse=True, keys=True) + for key in (keys or []): + client.kv.delete(key) + + class DynamoDBTester(object): table_name = 'LD_DYNAMODB_TEST_TABLE' table_created = False @@ -66,7 +94,8 @@ def __init__(self, cache_config): def init_store(self, prefix=None): self._create_table() self._clear_data() - return DynamoDB.new_feature_store(self.table_name, prefix=prefix, dynamodb_opts=self.options) + return DynamoDB.new_feature_store(self.table_name, prefix=prefix, dynamodb_opts=self.options, + caching=self._cache_config) @property def supports_prefix(self): @@ -147,6 +176,10 @@ class TestFeatureStore: DynamoDBTester(CacheConfig.disabled()) ] + if have_consul: + params.append(ConsulTester(CacheConfig.default())) + params.append(ConsulTester(CacheConfig.disabled())) + @pytest.fixture(params=params) def tester(self, request): return request.param From 89a96be19b24163292c0b00a46638325f3cf780e Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 18 Jan 2019 17:49:25 -0800 Subject: [PATCH 033/190] typo --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 5c83ba64..8671b022 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -18,7 +18,7 @@ test-template: &test-template sudo pip install --upgrade pip setuptools; sudo pip install -r test-requirements.txt; if [[ "$CIRCLE_JOB != test-3.3" ]] && [[ "$CIRCLE_JOB" != "test-3.4" ]]; then - sudo pip install -r consul-requirements.text; + sudo pip install -r consul-requirements.txt; fi; sudo python setup.py install; pip freeze From da8c1a67b8492e30800f411a1616538f8ee665e2 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 18 Jan 2019 17:53:21 -0800 Subject: [PATCH 034/190] rm extra import --- testing/test_feature_store.py | 1 - 1 file changed, 1 deletion(-) diff --git a/testing/test_feature_store.py b/testing/test_feature_store.py index 76a7f41e..6370a848 100644 --- a/testing/test_feature_store.py +++ b/testing/test_feature_store.py @@ -1,5 +1,4 @@ import boto3 -import consul import json import pytest import redis From b19e6188d834c5e1997050200e0c59b9664a842a Mon Sep 17 00:00:00 2001 From: Eli Bishop 
Date: Fri, 18 Jan 2019 18:04:15 -0800 Subject: [PATCH 035/190] fix byte/string issue and rename file --- .../consul/{feature_store.py => consul_feature_store.py} | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) rename ldclient/impl/integrations/consul/{feature_store.py => consul_feature_store.py} (97%) diff --git a/ldclient/impl/integrations/consul/feature_store.py b/ldclient/impl/integrations/consul/consul_feature_store.py similarity index 97% rename from ldclient/impl/integrations/consul/feature_store.py rename to ldclient/impl/integrations/consul/consul_feature_store.py index 5fe2d8ad..6fc8652e 100644 --- a/ldclient/impl/integrations/consul/feature_store.py +++ b/ldclient/impl/integrations/consul/consul_feature_store.py @@ -75,13 +75,13 @@ def init_internal(self, all_data): def get_internal(self, kind, key): index, resp = self._client.kv.get(self._item_key(kind, key)) - return None if resp is None else json.loads(resp['Value']) + return None if resp is None else json.loads(resp['Value'].decode('utf-8')) def get_all_internal(self, kind): items_out = {} index, results = self._client.kv.get(self._kind_key(kind), recurse=True) for result in results: - item = json.loads(result['Value']) + item = json.loads(result['Value'].decode('utf-8')) items_out[item['key']] = item return items_out @@ -95,7 +95,7 @@ def upsert_internal(self, kind, new_item): if old_value is None: mod_index = 0 else: - old_item = json.loads(old_value['Value']) + old_item = json.loads(old_value['Value'].decode('utf-8')) # Check whether the item is stale. If so, don't do the update (and return the existing item to # CachingStoreWrapper so it can be cached) if old_item['version'] >= new_item['version']: From db621dc4d72d90b87a6474a06cf010a55b3d3bf2 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 18 Jan 2019 18:04:36 -0800 Subject: [PATCH 036/190] rename file --- ldclient/integrations.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/ldclient/integrations.py b/ldclient/integrations.py index aa74da1e..d2d55354 100644 --- a/ldclient/integrations.py +++ b/ldclient/integrations.py @@ -1,6 +1,6 @@ from ldclient.feature_store import CacheConfig from ldclient.feature_store_helpers import CachingStoreWrapper -from ldclient.impl.integrations.consul.feature_store import _ConsulFeatureStoreCore +from ldclient.impl.integrations.consul.consul_feature_store import _ConsulFeatureStoreCore from ldclient.dynamodb_feature_store import _DynamoDBFeatureStoreCore from ldclient.redis_feature_store import _RedisFeatureStoreCore @@ -23,6 +23,9 @@ def new_feature_store(host=None, returned by this method into the `feature_store` property of your client configuration (:class:ldclient.config.Config). + Note that `python-consul` is not available for Python 3.3 or 3.4, so this feature cannot be + used in those Python versions. 
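+
+        If your application must still run on those Python versions, you can guard the
+        import in the same way this SDK's own test suite does (a sketch):
+        ::
+
+            have_consul = False
+            try:
+                import consul
+                have_consul = True
+            except ImportError:
+                pass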
+ :param string host: Hostname of the Consul server (uses "localhost" if omitted) :param int port: Port of the Consul server (uses 8500 if omitted) :param string prefix: An optional namespace prefix to be prepended to all Consul keys From b09e07eabba1410adba388cce7980488238dba8a Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 18 Jan 2019 18:04:45 -0800 Subject: [PATCH 037/190] docs --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index d25ee307..61e67050 100644 --- a/README.md +++ b/README.md @@ -81,7 +81,9 @@ The SDK is tested with the most recent patch releases of Python 2.7, 3.3, 3.4, 3 Database integrations --------------------- -Feature flag data can be kept in a persistent store using Redis or DynamoDB. These adapters are implemented in the `DynamoDB` and `Redis` classes in `ldclient.integrations`; to use them, call the `new_feature_store` method in the appropriate class, and put the returned object in the `feature_store` property of your client configuration. See [`ldclient.integrations`](https://github.com/launchdarkly/python-client-private/blob/master/ldclient/integrations.py) and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store) for more information. +Feature flag data can be kept in a persistent store using Consul, DynamoDB, or Redis. These adapters are implemented in the `Consul`, `DynamoDB` and `Redis` classes in `ldclient.integrations`; to use them, call the `new_feature_store` method in the appropriate class, and put the returned object in the `feature_store` property of your client configuration. See [`ldclient.integrations`](https://github.com/launchdarkly/python-client-private/blob/master/ldclient/integrations.py) and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store) for more information. + +Note that Consul is not supported in Python 3.3 or 3.4. 
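+For example, a DynamoDB-backed store can be configured like this (a sketch; the table
+name is hypothetical, and the table itself must already have been created):
+
+    from ldclient.config import Config
+    from ldclient.feature_store import CacheConfig
+    from ldclient.integrations import DynamoDB
+
+    store = DynamoDB.new_feature_store('my-flags-table',
+                                       caching=CacheConfig(expiration=30, capacity=1000))
+    config = Config(feature_store=store)
+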
Using flag data from a file --------------------------- From 9ea89ca60c501c4795e663ef0b36738e082fb3ae Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 18 Jan 2019 18:09:42 -0800 Subject: [PATCH 038/190] script typo --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 8671b022..714c5ee1 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -17,7 +17,7 @@ test-template: &test-template command: | sudo pip install --upgrade pip setuptools; sudo pip install -r test-requirements.txt; - if [[ "$CIRCLE_JOB != test-3.3" ]] && [[ "$CIRCLE_JOB" != "test-3.4" ]]; then + if [[ "$CIRCLE_JOB" != "test-3.3" ]] && [[ "$CIRCLE_JOB" != "test-3.4" ]]; then sudo pip install -r consul-requirements.txt; fi; sudo python setup.py install; From a50e6f35d14de0b0689ee49d419f63b51bd049b4 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 18 Jan 2019 18:30:38 -0800 Subject: [PATCH 039/190] move all low-level feature store integration code into submodules --- .../impl/integrations/dynamodb/__init__.py | 0 .../dynamodb}/dynamodb_feature_store.py | 0 ldclient/impl/integrations/redis/__init__.py | 0 .../integrations/redis/redis_feature_store.py | 101 +++++++++++++++++ ldclient/integrations.py | 9 +- ldclient/redis_feature_store.py | 107 +----------------- testing/test_feature_store.py | 2 +- 7 files changed, 112 insertions(+), 107 deletions(-) create mode 100644 ldclient/impl/integrations/dynamodb/__init__.py rename ldclient/{ => impl/integrations/dynamodb}/dynamodb_feature_store.py (100%) create mode 100644 ldclient/impl/integrations/redis/__init__.py create mode 100644 ldclient/impl/integrations/redis/redis_feature_store.py diff --git a/ldclient/impl/integrations/dynamodb/__init__.py b/ldclient/impl/integrations/dynamodb/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ldclient/dynamodb_feature_store.py b/ldclient/impl/integrations/dynamodb/dynamodb_feature_store.py similarity index 100% rename from ldclient/dynamodb_feature_store.py rename to ldclient/impl/integrations/dynamodb/dynamodb_feature_store.py diff --git a/ldclient/impl/integrations/redis/__init__.py b/ldclient/impl/integrations/redis/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ldclient/impl/integrations/redis/redis_feature_store.py b/ldclient/impl/integrations/redis/redis_feature_store.py new file mode 100644 index 00000000..f0be83a4 --- /dev/null +++ b/ldclient/impl/integrations/redis/redis_feature_store.py @@ -0,0 +1,101 @@ +import json + +have_redis = False +try: + import redis + have_redis = True +except ImportError: + pass + +from ldclient import log +from ldclient.interfaces import FeatureStoreCore +from ldclient.versioned_data_kind import FEATURES + + +class _RedisFeatureStoreCore(FeatureStoreCore): + def __init__(self, url, prefix, max_connections): + if not have_redis: + raise NotImplementedError("Cannot use Redis feature store because redis package is not installed") + self._prefix = prefix or 'launchdarkly' + self._pool = redis.ConnectionPool.from_url(url=url, max_connections=max_connections) + self.test_update_hook = None # exposed for testing + log.info("Started RedisFeatureStore connected to URL: " + url + " using prefix: " + self._prefix) + + def _items_key(self, kind): + return "{0}:{1}".format(self._prefix, kind.namespace) + + def init_internal(self, all_data): + pipe = redis.Redis(connection_pool=self._pool).pipeline() + + all_count = 0 + + for kind, items in all_data.items(): + base_key = 
self._items_key(kind) + pipe.delete(base_key) + for key, item in items.items(): + item_json = json.dumps(item) + pipe.hset(base_key, key, item_json) + all_count = all_count + len(items) + pipe.execute() + log.info("Initialized RedisFeatureStore with %d items", all_count) + + def get_all_internal(self, kind): + r = redis.Redis(connection_pool=self._pool) + all_items = r.hgetall(self._items_key(kind)) + + if all_items is None or all_items is "": + all_items = {} + + results = {} + for key, item_json in all_items.items(): + key = key.decode('utf-8') # necessary in Python 3 + results[key] = json.loads(item_json.decode('utf-8')) + return results + + def get_internal(self, kind, key): + r = redis.Redis(connection_pool=self._pool) + item_json = r.hget(self._items_key(kind), key) + + if item_json is None or item_json is "": + log.debug("RedisFeatureStore: key %s not found in '%s'. Returning None.", key, kind.namespace) + return None + + return json.loads(item_json.decode('utf-8')) + + def upsert_internal(self, kind, item): + r = redis.Redis(connection_pool=self._pool) + base_key = self._items_key(kind) + key = item['key'] + item_json = json.dumps(item) + + while True: + pipeline = r.pipeline() + pipeline.watch(base_key) + old = self.get_internal(kind, key) + if self.test_update_hook is not None: + self.test_update_hook(base_key, key) + if old and old['version'] >= item['version']: + log.debug('RedisFeatureStore: Attempted to %s key: %s version %d with a version that is the same or older: %d in "%s"', + 'delete' if item.get('deleted') else 'update', + key, old['version'], item['version'], kind.namespace) + pipeline.unwatch() + return old + else: + pipeline.multi() + pipeline.hset(base_key, key, item_json) + try: + pipeline.execute() + # Unlike Redis implementations for other platforms, in redis-py a failed WATCH + # produces an exception rather than a null result from execute(). + except redis.exceptions.WatchError: + log.debug("RedisFeatureStore: concurrent modification detected, retrying") + continue + return item + + def initialized_internal(self): + r = redis.Redis(connection_pool=self._pool) + return r.exists(self._items_key(FEATURES)) + + def _before_update_transaction(self, base_key, key): + # exposed for testing + pass diff --git a/ldclient/integrations.py b/ldclient/integrations.py index d2d55354..5cfc468b 100644 --- a/ldclient/integrations.py +++ b/ldclient/integrations.py @@ -1,14 +1,17 @@ from ldclient.feature_store import CacheConfig from ldclient.feature_store_helpers import CachingStoreWrapper from ldclient.impl.integrations.consul.consul_feature_store import _ConsulFeatureStoreCore -from ldclient.dynamodb_feature_store import _DynamoDBFeatureStoreCore -from ldclient.redis_feature_store import _RedisFeatureStoreCore +from ldclient.impl.integrations.dynamodb.dynamodb_feature_store import _DynamoDBFeatureStoreCore +from ldclient.impl.integrations.redis.redis_feature_store import _RedisFeatureStoreCore class Consul(object): """Provides factory methods for integrations between the LaunchDarkly SDK and Consul. 
""" + """The key prefix that is used if you do not specify one.""" + DEFAULT_PREFIX = "launchdarkly" + @staticmethod def new_feature_store(host=None, port=None, @@ -28,7 +31,7 @@ def new_feature_store(host=None, :param string host: Hostname of the Consul server (uses "localhost" if omitted) :param int port: Port of the Consul server (uses 8500 if omitted) - :param string prefix: An optional namespace prefix to be prepended to all Consul keys + :param string prefix: A namespace prefix to be prepended to all Consul keys :param dict consul_opts: Optional parameters for configuring the Consul client, if you need to set any of them besides host and port, as defined in the python-consul API; see https://python-consul.readthedocs.io/en/latest/#consul diff --git a/ldclient/redis_feature_store.py b/ldclient/redis_feature_store.py index 16302212..ff93c402 100644 --- a/ldclient/redis_feature_store.py +++ b/ldclient/redis_feature_store.py @@ -1,17 +1,8 @@ -import json +from ldclient.impl.integrations.redis.redis_feature_store import _RedisFeatureStoreCore -have_redis = False -try: - import redis - have_redis = True -except ImportError: - pass - -from ldclient import log from ldclient.feature_store import CacheConfig from ldclient.feature_store_helpers import CachingStoreWrapper -from ldclient.interfaces import FeatureStore, FeatureStoreCore -from ldclient.versioned_data_kind import FEATURES +from ldclient.interfaces import FeatureStore # Note that this class is now just a facade around CachingStoreWrapper, which is in turn delegating @@ -22,8 +13,8 @@ class RedisFeatureStore(FeatureStore): """A Redis-backed implementation of :class:`ldclient.feature_store.FeatureStore`. - This implementation class is deprecated and may be changed or removed in the future. Please use - :func:`ldclient.integrations.Redis.new_feature_store()`. + This module and this implementation class are deprecated and may be changed or removed in the future. + Please use :func:`ldclient.integrations.Redis.new_feature_store()`. 
""" def __init__(self, url='redis://localhost:6379/0', @@ -31,8 +22,6 @@ def __init__(self, max_connections=16, expiration=15, capacity=1000): - if not have_redis: - raise NotImplementedError("Cannot use Redis feature store because redis package is not installed") self.core = _RedisFeatureStoreCore(url, prefix, max_connections) # exposed for testing self._wrapper = CachingStoreWrapper(self.core, CacheConfig(expiration=expiration, capacity=capacity)) @@ -54,91 +43,3 @@ def delete(self, kind, key, version): @property def initialized(self): return self._wrapper.initialized - - -class _RedisFeatureStoreCore(FeatureStoreCore): - def __init__(self, url, prefix, max_connections): - - self._prefix = prefix or 'launchdarkly' - self._pool = redis.ConnectionPool.from_url(url=url, max_connections=max_connections) - self.test_update_hook = None # exposed for testing - log.info("Started RedisFeatureStore connected to URL: " + url + " using prefix: " + self._prefix) - - def _items_key(self, kind): - return "{0}:{1}".format(self._prefix, kind.namespace) - - def init_internal(self, all_data): - pipe = redis.Redis(connection_pool=self._pool).pipeline() - - all_count = 0 - - for kind, items in all_data.items(): - base_key = self._items_key(kind) - pipe.delete(base_key) - for key, item in items.items(): - item_json = json.dumps(item) - pipe.hset(base_key, key, item_json) - all_count = all_count + len(items) - pipe.execute() - log.info("Initialized RedisFeatureStore with %d items", all_count) - - def get_all_internal(self, kind): - r = redis.Redis(connection_pool=self._pool) - all_items = r.hgetall(self._items_key(kind)) - - if all_items is None or all_items is "": - all_items = {} - - results = {} - for key, item_json in all_items.items(): - key = key.decode('utf-8') # necessary in Python 3 - results[key] = json.loads(item_json.decode('utf-8')) - return results - - def get_internal(self, kind, key): - r = redis.Redis(connection_pool=self._pool) - item_json = r.hget(self._items_key(kind), key) - - if item_json is None or item_json is "": - log.debug("RedisFeatureStore: key %s not found in '%s'. Returning None.", key, kind.namespace) - return None - - return json.loads(item_json.decode('utf-8')) - - def upsert_internal(self, kind, item): - r = redis.Redis(connection_pool=self._pool) - base_key = self._items_key(kind) - key = item['key'] - item_json = json.dumps(item) - - while True: - pipeline = r.pipeline() - pipeline.watch(base_key) - old = self.get_internal(kind, key) - if self.test_update_hook is not None: - self.test_update_hook(base_key, key) - if old and old['version'] >= item['version']: - log.debug('RedisFeatureStore: Attempted to %s key: %s version %d with a version that is the same or older: %d in "%s"', - 'delete' if item.get('deleted') else 'update', - key, old['version'], item['version'], kind.namespace) - pipeline.unwatch() - return old - else: - pipeline.multi() - pipeline.hset(base_key, key, item_json) - try: - pipeline.execute() - # Unlike Redis implementations for other platforms, in redis-py a failed WATCH - # produces an exception rather than a null result from execute(). 
- except redis.exceptions.WatchError: - log.debug("RedisFeatureStore: concurrent modification detected, retrying") - continue - return item - - def initialized_internal(self): - r = redis.Redis(connection_pool=self._pool) - return r.exists(self._items_key(FEATURES)) - - def _before_update_transaction(self, base_key, key): - # exposed for testing - pass diff --git a/testing/test_feature_store.py b/testing/test_feature_store.py index 6370a848..ce0150cf 100644 --- a/testing/test_feature_store.py +++ b/testing/test_feature_store.py @@ -12,8 +12,8 @@ except ImportError: pass -from ldclient.dynamodb_feature_store import _DynamoDBFeatureStoreCore, _DynamoDBHelpers from ldclient.feature_store import CacheConfig, InMemoryFeatureStore +from ldclient.impl.integrations.dynamodb.dynamodb_feature_store import _DynamoDBFeatureStoreCore, _DynamoDBHelpers from ldclient.integrations import Consul, DynamoDB, Redis from ldclient.redis_feature_store import RedisFeatureStore from ldclient.versioned_data_kind import FEATURES From 0baddab8a068d034ca73b5ae72b1aa304cb94314 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 28 Jan 2019 13:20:37 -0800 Subject: [PATCH 040/190] move file data source implementation --- ldclient/file_data_source.py | 255 +----------------- ldclient/impl/integrations/files/__init__.py | 0 .../integrations/files/file_data_source.py | 172 ++++++++++++ ldclient/integrations.py | 105 ++++++++ testing/test_file_data_source.py | 8 +- 5 files changed, 290 insertions(+), 250 deletions(-) create mode 100644 ldclient/impl/integrations/files/__init__.py create mode 100644 ldclient/impl/integrations/files/file_data_source.py diff --git a/ldclient/file_data_source.py b/ldclient/file_data_source.py index ebff765b..61088d50 100644 --- a/ldclient/file_data_source.py +++ b/ldclient/file_data_source.py @@ -1,29 +1,4 @@ -import json -import os -import six -import traceback - -have_yaml = False -try: - import yaml - have_yaml = True -except ImportError: - pass - -have_watchdog = False -try: - import watchdog - import watchdog.events - import watchdog.observers - have_watchdog = True -except ImportError: - pass - -from ldclient.interfaces import UpdateProcessor -from ldclient.repeating_timer import RepeatingTimer -from ldclient.util import log -from ldclient.versioned_data_kind import FEATURES, SEGMENTS - +from ldclient.impl.integrations.files.file_data_source import _FileDataSource class FileDataSource(UpdateProcessor): @classmethod @@ -32,80 +7,9 @@ def factory(cls, **kwargs): used in a test environment, to operate using a predetermined feature flag state without an actual LaunchDarkly connection. - To use this component, call `FileDataSource.factory`, and store its return value in the - `update_processor_class` property of your LaunchDarkly client configuration. In the options - to `factory`, set `paths` to the file path(s) of your data file(s): - :: - - factory = FileDataSource.factory(paths = [ myFilePath ]) - config = Config(update_processor_class = factory) - - This will cause the client not to connect to LaunchDarkly to get feature flags. The - client may still make network connections to send analytics events, unless you have disabled - this with Config.send_events or Config.offline. - - Flag data files can be either JSON or YAML (in order to use YAML, you must install the 'pyyaml' - package). They contain an object with three possible properties: - - * "flags": Feature flag definitions. - * "flagValues": Simplified feature flags that contain only a value. - * "segments": User segment definitions. 
- - The format of the data in "flags" and "segments" is defined by the LaunchDarkly application - and is subject to change. Rather than trying to construct these objects yourself, it is simpler - to request existing flags directly from the LaunchDarkly server in JSON format, and use this - output as the starting point for your file. In Linux you would do this: - :: - - curl -H "Authorization: {your sdk key}" https://app.launchdarkly.com/sdk/latest-all - - The output will look something like this (but with many more properties): - :: - - { - "flags": { - "flag-key-1": { - "key": "flag-key-1", - "on": true, - "variations": [ "a", "b" ] - } - }, - "segments": { - "segment-key-1": { - "key": "segment-key-1", - "includes": [ "user-key-1" ] - } - } - } - - Data in this format allows the SDK to exactly duplicate all the kinds of flag behavior supported - by LaunchDarkly. However, in many cases you will not need this complexity, but will just want to - set specific flag keys to specific values. For that, you can use a much simpler format: - :: - - { - "flagValues": { - "my-string-flag-key": "value-1", - "my-boolean-flag-key": true, - "my-integer-flag-key": 3 - } - } - - Or, in YAML: - :: - - flagValues: - my-string-flag-key: "value-1" - my-boolean-flag-key: true - my-integer-flag-key: 1 - - It is also possible to specify both "flags" and "flagValues", if you want some flags - to have simple values and others to have complex behavior. However, it is an error to use the - same flag key or segment key more than once, either in a single file or across multiple files. - - If the data source encounters any error in any file-- malformed content, a missing file, or a - duplicate key-- it will not load flags from any of the files. - + This module and this implementation class are deprecated and may be changed or removed in the future. + Please use :func:`ldclient.integrations.Files.new_data_source()`. + :param kwargs: See below @@ -123,150 +27,9 @@ def factory(cls, **kwargs): used only if auto_update is true, and if the native file-watching mechanism from 'watchdog' is not being used. The default value is 1 second. 
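+
+        A sketch of the equivalent setup using the new API (the file path is hypothetical):
+        ::
+
+            from ldclient.config import Config
+            from ldclient.integrations import Files
+
+            data_source = Files.new_data_source(paths=['./flagdata.json'],
+                                                auto_update=True)
+            config = Config(update_processor_class=data_source)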
""" - return lambda config, store, ready : FileDataSource(store, kwargs, ready) - - def __init__(self, store, options, ready): - self._store = store - self._ready = ready - self._inited = False - self._paths = options.get('paths', []) - if isinstance(self._paths, six.string_types): - self._paths = [ self._paths ] - self._auto_update = options.get('auto_update', False) - self._auto_updater = None - self._poll_interval = options.get('poll_interval', 1) - self._force_polling = options.get('force_polling', False) # used only in tests - - def start(self): - self._load_all() - - if self._auto_update: - self._auto_updater = self._start_auto_updater() - - # We will signal readiness immediately regardless of whether the file load succeeded or failed - - # the difference can be detected by checking initialized() - self._ready.set() - - def stop(self): - if self._auto_updater: - self._auto_updater.stop() - - def initialized(self): - return self._inited - - def _load_all(self): - all_data = { FEATURES: {}, SEGMENTS: {} } - for path in self._paths: - try: - self._load_file(path, all_data) - except Exception as e: - log.error('Unable to load flag data from "%s": %s' % (path, repr(e))) - traceback.print_exc() - return - self._store.init(all_data) - self._inited = True - - def _load_file(self, path, all_data): - content = None - with open(path, 'r') as f: - content = f.read() - parsed = self._parse_content(content) - for key, flag in six.iteritems(parsed.get('flags', {})): - self._add_item(all_data, FEATURES, flag) - for key, value in six.iteritems(parsed.get('flagValues', {})): - self._add_item(all_data, FEATURES, self._make_flag_with_value(key, value)) - for key, segment in six.iteritems(parsed.get('segments', {})): - self._add_item(all_data, SEGMENTS, segment) - - def _parse_content(self, content): - if have_yaml: - return yaml.load(content) # pyyaml correctly parses JSON too - return json.loads(content) - - def _add_item(self, all_data, kind, item): - items = all_data[kind] - key = item.get('key') - if items.get(key) is None: - items[key] = item - else: - raise Exception('In %s, key "%s" was used more than once' % (kind.namespace, key)) - - def _make_flag_with_value(self, key, value): - return { - 'key': key, - 'on': True, - 'fallthrough': { - 'variation': 0 - }, - 'variations': [ value ] - } - - def _start_auto_updater(self): - resolved_paths = [] - for path in self._paths: - try: - resolved_paths.append(os.path.realpath(path)) - except: - log.warn('Cannot watch for changes to data file "%s" because it is an invalid path' % path) - if have_watchdog and not self._force_polling: - return FileDataSource.WatchdogAutoUpdater(resolved_paths, self._load_all) - else: - return FileDataSource.PollingAutoUpdater(resolved_paths, self._load_all, self._poll_interval) - - # Watch for changes to data files using the watchdog package. This uses native OS filesystem notifications - # if available for the current platform. 
- class WatchdogAutoUpdater(object): - def __init__(self, resolved_paths, reloader): - watched_files = set(resolved_paths) - - class LDWatchdogHandler(watchdog.events.FileSystemEventHandler): - def on_any_event(self, event): - if event.src_path in watched_files: - reloader() - - dir_paths = set() - for path in resolved_paths: - dir_paths.add(os.path.dirname(path)) - - self._observer = watchdog.observers.Observer() - handler = LDWatchdogHandler() - for path in dir_paths: - self._observer.schedule(handler, path) - self._observer.start() - - def stop(self): - self._observer.stop() - self._observer.join() - - # Watch for changes to data files by polling their modification times. This is used if auto-update is - # on but the watchdog package is not installed. - class PollingAutoUpdater(object): - def __init__(self, resolved_paths, reloader, interval): - self._paths = resolved_paths - self._reloader = reloader - self._file_times = self._check_file_times() - self._timer = RepeatingTimer(interval, self._poll) - self._timer.start() - - def stop(self): - self._timer.stop() - - def _poll(self): - new_times = self._check_file_times() - changed = False - for file_path, file_time in six.iteritems(self._file_times): - if new_times.get(file_path) is not None and new_times.get(file_path) != file_time: - changed = True - break - self._file_times = new_times - if changed: - self._reloader() - def _check_file_times(self): - ret = {} - for path in self._paths: - try: - ret[path] = os.path.getmtime(path) - except: - ret[path] = None - return ret + return lambda config, store, ready : _FileDataSource(store, ready, + paths=kwargs.get("paths"), + auto_update=kwargs.get("auto_update", False), + poll_interval=kwargs.get("poll_interval", 1), + force_polling=kwargs.get("force_polling", False)) diff --git a/ldclient/impl/integrations/files/__init__.py b/ldclient/impl/integrations/files/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ldclient/impl/integrations/files/file_data_source.py b/ldclient/impl/integrations/files/file_data_source.py new file mode 100644 index 00000000..9ba6e561 --- /dev/null +++ b/ldclient/impl/integrations/files/file_data_source.py @@ -0,0 +1,172 @@ +import json +import os +import six +import traceback + +have_yaml = False +try: + import yaml + have_yaml = True +except ImportError: + pass + +have_watchdog = False +try: + import watchdog + import watchdog.events + import watchdog.observers + have_watchdog = True +except ImportError: + pass + +from ldclient.interfaces import UpdateProcessor +from ldclient.repeating_timer import RepeatingTimer +from ldclient.util import log +from ldclient.versioned_data_kind import FEATURES, SEGMENTS + +class _FileDataSource(UpdateProcessor): + def __init__(self, store, ready, paths, auto_update, poll_interval, force_polling): + self._store = store + self._ready = ready + self._inited = False + self._paths = paths + if isinstance(self._paths, six.string_types): + self._paths = [ self._paths ] + self._auto_update = auto_update + self._auto_updater = None + self._poll_interval = poll_interval + self._force_polling = force_polling + + def start(self): + self._load_all() + + if self._auto_update: + self._auto_updater = self._start_auto_updater() + + # We will signal readiness immediately regardless of whether the file load succeeded or failed - + # the difference can be detected by checking initialized() + self._ready.set() + + def stop(self): + if self._auto_updater: + self._auto_updater.stop() + + def initialized(self): + return self._inited + 
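+    # NOTE: _load_all() reads every configured file, merges the contents into a single
+    # map of flags and segments, and stores them in one init() call. If any file fails
+    # to load or parse, nothing is stored and initialized() remains False.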
+ def _load_all(self): + all_data = { FEATURES: {}, SEGMENTS: {} } + for path in self._paths: + try: + self._load_file(path, all_data) + except Exception as e: + log.error('Unable to load flag data from "%s": %s' % (path, repr(e))) + traceback.print_exc() + return + self._store.init(all_data) + self._inited = True + + def _load_file(self, path, all_data): + content = None + with open(path, 'r') as f: + content = f.read() + parsed = self._parse_content(content) + for key, flag in six.iteritems(parsed.get('flags', {})): + self._add_item(all_data, FEATURES, flag) + for key, value in six.iteritems(parsed.get('flagValues', {})): + self._add_item(all_data, FEATURES, self._make_flag_with_value(key, value)) + for key, segment in six.iteritems(parsed.get('segments', {})): + self._add_item(all_data, SEGMENTS, segment) + + def _parse_content(self, content): + if have_yaml: + return yaml.load(content) # pyyaml correctly parses JSON too + return json.loads(content) + + def _add_item(self, all_data, kind, item): + items = all_data[kind] + key = item.get('key') + if items.get(key) is None: + items[key] = item + else: + raise Exception('In %s, key "%s" was used more than once' % (kind.namespace, key)) + + def _make_flag_with_value(self, key, value): + return { + 'key': key, + 'on': True, + 'fallthrough': { + 'variation': 0 + }, + 'variations': [ value ] + } + + def _start_auto_updater(self): + resolved_paths = [] + for path in self._paths: + try: + resolved_paths.append(os.path.realpath(path)) + except: + log.warn('Cannot watch for changes to data file "%s" because it is an invalid path' % path) + if have_watchdog and not self._force_polling: + return _FileDataSource.WatchdogAutoUpdater(resolved_paths, self._load_all) + else: + return _FileDataSource.PollingAutoUpdater(resolved_paths, self._load_all, self._poll_interval) + + # Watch for changes to data files using the watchdog package. This uses native OS filesystem notifications + # if available for the current platform. + class WatchdogAutoUpdater(object): + def __init__(self, resolved_paths, reloader): + watched_files = set(resolved_paths) + + class LDWatchdogHandler(watchdog.events.FileSystemEventHandler): + def on_any_event(self, event): + if event.src_path in watched_files: + reloader() + + dir_paths = set() + for path in resolved_paths: + dir_paths.add(os.path.dirname(path)) + + self._observer = watchdog.observers.Observer() + handler = LDWatchdogHandler() + for path in dir_paths: + self._observer.schedule(handler, path) + self._observer.start() + + def stop(self): + self._observer.stop() + self._observer.join() + + # Watch for changes to data files by polling their modification times. This is used if auto-update is + # on but the watchdog package is not installed. 
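+    # Each poll compares os.path.getmtime() for every watched file against the value
+    # recorded on the previous poll; if any modification time has changed, the same
+    # _load_all() used at startup is invoked to reload everything.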
+ class PollingAutoUpdater(object): + def __init__(self, resolved_paths, reloader, interval): + self._paths = resolved_paths + self._reloader = reloader + self._file_times = self._check_file_times() + self._timer = RepeatingTimer(interval, self._poll) + self._timer.start() + + def stop(self): + self._timer.stop() + + def _poll(self): + new_times = self._check_file_times() + changed = False + for file_path, file_time in six.iteritems(self._file_times): + if new_times.get(file_path) is not None and new_times.get(file_path) != file_time: + changed = True + break + self._file_times = new_times + if changed: + self._reloader() + + def _check_file_times(self): + ret = {} + for path in self._paths: + try: + ret[path] = os.path.getmtime(path) + except: + ret[path] = None + return ret diff --git a/ldclient/integrations.py b/ldclient/integrations.py index 5cfc468b..fcc89abc 100644 --- a/ldclient/integrations.py +++ b/ldclient/integrations.py @@ -2,6 +2,7 @@ from ldclient.feature_store_helpers import CachingStoreWrapper from ldclient.impl.integrations.consul.consul_feature_store import _ConsulFeatureStoreCore from ldclient.impl.integrations.dynamodb.dynamodb_feature_store import _DynamoDBFeatureStoreCore +from ldclient.impl.integrations.files.file_data_source import _FileDataSource from ldclient.impl.integrations.redis.redis_feature_store import _RedisFeatureStoreCore @@ -111,3 +112,107 @@ def new_feature_store(url='redis://localhost:6379/0', wrapper = CachingStoreWrapper(core, caching) wrapper.core = core # exposed for testing return wrapper + + +class Files(object): + """Provides factory methods for integrations with filesystem data. + """ + + @staticmethod + def new_data_source(paths, auto_update=False, poll_interval=1, force_polling=False): + """Provides a way to use local files as a source of feature flag state. This would typically be + used in a test environment, to operate using a predetermined feature flag state without an + actual LaunchDarkly connection. + + To use this component, call `new_data_source`, specifying the file path(s) of your data file(s) + in the `path` parameter; then put the value returned by this method into the `update_processor_class` + property of your LaunchDarkly client configuration (:class:ldclient.config.Config). + :: + + data_source = LaunchDarkly::Integrations::Files.new_data_source(paths=[ myFilePath ]) + config = Config(update_processor_class=data_source) + + This will cause the client not to connect to LaunchDarkly to get feature flags. The + client may still make network connections to send analytics events, unless you have disabled + this with Config.send_events or Config.offline. + + Flag data files can be either JSON or YAML (in order to use YAML, you must install the 'pyyaml' + package). They contain an object with three possible properties: + + * "flags": Feature flag definitions. + * "flagValues": Simplified feature flags that contain only a value. + * "segments": User segment definitions. + + The format of the data in "flags" and "segments" is defined by the LaunchDarkly application + and is subject to change. Rather than trying to construct these objects yourself, it is simpler + to request existing flags directly from the LaunchDarkly server in JSON format, and use this + output as the starting point for your file. 
In Linux you would do this: + :: + + curl -H "Authorization: {your sdk key}" https://app.launchdarkly.com/sdk/latest-all + + The output will look something like this (but with many more properties): + :: + + { + "flags": { + "flag-key-1": { + "key": "flag-key-1", + "on": true, + "variations": [ "a", "b" ] + } + }, + "segments": { + "segment-key-1": { + "key": "segment-key-1", + "includes": [ "user-key-1" ] + } + } + } + + Data in this format allows the SDK to exactly duplicate all the kinds of flag behavior supported + by LaunchDarkly. However, in many cases you will not need this complexity, but will just want to + set specific flag keys to specific values. For that, you can use a much simpler format: + :: + + { + "flagValues": { + "my-string-flag-key": "value-1", + "my-boolean-flag-key": true, + "my-integer-flag-key": 3 + } + } + + Or, in YAML: + :: + + flagValues: + my-string-flag-key: "value-1" + my-boolean-flag-key: true + my-integer-flag-key: 1 + + It is also possible to specify both "flags" and "flagValues", if you want some flags + to have simple values and others to have complex behavior. However, it is an error to use the + same flag key or segment key more than once, either in a single file or across multiple files. + + If the data source encounters any error in any file-- malformed content, a missing file, or a + duplicate key-- it will not load flags from any of the files. + + :param array paths: The paths of the source files for loading flag data. These may be absolute paths + or relative to the current working directory. Files will be parsed as JSON unless the 'pyyaml' + package is installed, in which case YAML is also allowed. + :param bool auto_update: (default: false) True if the data source should watch for changes to the source file(s) + and reload flags whenever there is a change. The default implementation of this feature is based on + polling the filesystem, which may not perform well; if you install the 'watchdog' package (not + included by default, to avoid adding unwanted dependencies to the SDK), its native file watching + mechanism will be used instead. Note that auto-updating will only work if all of the files you + specified have valid directory paths at startup time. + :param float poll_interval: (default: 1) The minimum interval, in seconds, between checks for file + modifications-- used only if `auto_update` is true, and if the native file-watching mechanism from + `watchdog` is not being used. + :param bool force_polling: (default: false) True if the data source should implement auto-update via + polling the filesystem even if a native mechanism is available. This is mainly for SDK testing. 
+ + :return: an object (actually a lambda) to be stored in the `update_processor_class` configuration property + """ + return lambda config, store, ready : _FileDataSource(store, ready, paths, auto_update, poll_interval, force_polling) diff --git a/testing/test_file_data_source.py b/testing/test_file_data_source.py index 68d1e5b7..2e232ec8 100644 --- a/testing/test_file_data_source.py +++ b/testing/test_file_data_source.py @@ -9,7 +9,7 @@ from ldclient.client import LDClient from ldclient.config import Config from ldclient.feature_store import InMemoryFeatureStore -from ldclient.file_data_source import FileDataSource +from ldclient.integrations import Files from ldclient.versioned_data_kind import FEATURES, SEGMENTS @@ -94,7 +94,7 @@ def teardown_function(): def make_data_source(**kwargs): global data_source - data_source = FileDataSource.factory(**kwargs)(Config(), store, ready) + data_source = Files.new_data_source(**kwargs)(Config(), store, ready) return data_source def make_temp_file(content): @@ -217,7 +217,7 @@ def test_reloads_modified_file_in_polling_mode(): def test_evaluates_full_flag_with_client_as_expected(): path = make_temp_file(all_properties_json) try: - factory = FileDataSource.factory(paths = path) + factory = Files.new_data_source(paths = path) client = LDClient(config=Config(update_processor_class = factory, send_events = False)) value = client.variation('flag1', { 'key': 'user' }, '') assert value == 'on' @@ -229,7 +229,7 @@ def test_evaluates_full_flag_with_client_as_expected(): def test_evaluates_simplified_flag_with_client_as_expected(): path = make_temp_file(all_properties_json) try: - factory = FileDataSource.factory(paths = path) + factory = Files.new_data_source(paths = path) client = LDClient(config=Config(update_processor_class = factory, send_events = False)) value = client.variation('flag2', { 'key': 'user' }, '') assert value == 'value2' From c8585baab7ee86b2087653451e47013c034b8cd6 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 28 Jan 2019 14:11:58 -0800 Subject: [PATCH 041/190] don't need future.with_statement in Python 2.6+ --- ldclient/client.py | 2 +- ldclient/util.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index 30c37e53..9cab10b6 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -1,4 +1,4 @@ -from __future__ import division, with_statement, absolute_import +from __future__ import division, absolute_import import hashlib import hmac diff --git a/ldclient/util.py b/ldclient/util.py index 618a7d9e..4cfb0324 100644 --- a/ldclient/util.py +++ b/ldclient/util.py @@ -1,4 +1,4 @@ -from __future__ import division, with_statement, absolute_import +from __future__ import division, absolute_import import certifi import logging From 2a6d53be3c9e1e2e7df87d3f89a43227cb6d402e Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 28 Jan 2019 14:15:16 -0800 Subject: [PATCH 042/190] don't need future.absolute_import in Python 2.6+ --- ldclient/client.py | 2 +- ldclient/event_processor.py | 2 -- ldclient/feature_requester.py | 2 -- ldclient/sse_client.py | 2 -- ldclient/streaming.py | 1 - ldclient/util.py | 2 +- 6 files changed, 2 insertions(+), 9 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index 9cab10b6..29d0c756 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -1,4 +1,4 @@ -from __future__ import division, absolute_import +from __future__ import division import hashlib import hmac diff --git a/ldclient/event_processor.py b/ldclient/event_processor.py 
index 3b89420f..9a0cae83 100644 --- a/ldclient/event_processor.py +++ b/ldclient/event_processor.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import - from collections import namedtuple from email.utils import parsedate import errno diff --git a/ldclient/feature_requester.py b/ldclient/feature_requester.py index 786c1708..046c594f 100644 --- a/ldclient/feature_requester.py +++ b/ldclient/feature_requester.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import - from collections import namedtuple import json import urllib3 diff --git a/ldclient/sse_client.py b/ldclient/sse_client.py index c97eb2d4..7e792961 100644 --- a/ldclient/sse_client.py +++ b/ldclient/sse_client.py @@ -1,5 +1,3 @@ -from __future__ import absolute_import - import re import time import warnings diff --git a/ldclient/streaming.py b/ldclient/streaming.py index bac83433..20599eb1 100644 --- a/ldclient/streaming.py +++ b/ldclient/streaming.py @@ -1,4 +1,3 @@ -from __future__ import absolute_import from collections import namedtuple import json diff --git a/ldclient/util.py b/ldclient/util.py index 4cfb0324..4612f871 100644 --- a/ldclient/util.py +++ b/ldclient/util.py @@ -1,4 +1,4 @@ -from __future__ import division, absolute_import +from __future__ import division import certifi import logging From c32793ade292b7f80b1b80fafbed0adbb76c44c2 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 28 Jan 2019 14:19:54 -0800 Subject: [PATCH 043/190] don't need future.print_function when you're printing a single string with parentheses --- demo/demo.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/demo/demo.py b/demo/demo.py index 987a05d4..8ac745f4 100644 --- a/demo/demo.py +++ b/demo/demo.py @@ -1,5 +1,3 @@ -from __future__ import print_function - import logging import sys From 4971d17eaa4b79528128a91910e1fe63b2afdfba Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 28 Jan 2019 14:24:21 -0800 Subject: [PATCH 044/190] don't need future.division since we're not using the / operator --- ldclient/client.py | 2 -- ldclient/util.py | 2 -- 2 files changed, 4 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index 29d0c756..6d6b32c7 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -1,5 +1,3 @@ -from __future__ import division - import hashlib import hmac import threading diff --git a/ldclient/util.py b/ldclient/util.py index 4612f871..fbb2f11d 100644 --- a/ldclient/util.py +++ b/ldclient/util.py @@ -1,5 +1,3 @@ -from __future__ import division - import certifi import logging import sys From 0abadf1efab4637f48c251d7bceaed1d724030e5 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 28 Jan 2019 14:24:35 -0800 Subject: [PATCH 045/190] rm unused dependency --- requirements.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 8787ac53..f86f3039 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,6 @@ backoff>=1.4.3 certifi>=2018.4.16 expiringdict>=1.1.4 -future>=0.16.0 six>=1.10.0 pyRFC3339>=1.0 jsonpickle==0.9.3 From e228e90771c188893597b5e49c7559efac332a82 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 28 Jan 2019 15:17:12 -0800 Subject: [PATCH 046/190] Revert "rm unused dependency" This reverts commit 0abadf1efab4637f48c251d7bceaed1d724030e5. 
--- requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements.txt b/requirements.txt index f86f3039..8787ac53 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,7 @@ backoff>=1.4.3 certifi>=2018.4.16 expiringdict>=1.1.4 +future>=0.16.0 six>=1.10.0 pyRFC3339>=1.0 jsonpickle==0.9.3 From 122d7a613b3e3228e98fa63a0b01b10b038e389f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 28 Jan 2019 17:11:06 -0800 Subject: [PATCH 047/190] don't need builtins.object unless we're defining an iterator, and even then we don't need it --- ldclient/client.py | 2 -- ldclient/sse_client.py | 4 ++++ requirements.txt | 1 - testing/test_ldclient.py | 1 - testing/test_user_filter.py | 1 - 5 files changed, 4 insertions(+), 5 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index 6d6b32c7..ff96475b 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -3,8 +3,6 @@ import threading import traceback -from builtins import object - from ldclient.config import Config as Config from ldclient.event_processor import NullEventProcessor from ldclient.feature_requester import FeatureRequesterImpl diff --git a/ldclient/sse_client.py b/ldclient/sse_client.py index 7e792961..5b41413b 100644 --- a/ldclient/sse_client.py +++ b/ldclient/sse_client.py @@ -109,6 +109,10 @@ def __next__(self): return msg + # The following two lines make our iterator class compatible with both Python 2.x and 3.x, + # even though they expect different magic method names. We could accomplish the same thing + # by importing builtins.object and deriving from that, but this way it's easier to see + # what we're doing. if six.PY2: next = __next__ diff --git a/requirements.txt b/requirements.txt index 8787ac53..f86f3039 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,6 @@ backoff>=1.4.3 certifi>=2018.4.16 expiringdict>=1.1.4 -future>=0.16.0 six>=1.10.0 pyRFC3339>=1.0 jsonpickle==0.9.3 diff --git a/testing/test_ldclient.py b/testing/test_ldclient.py index a31d2324..0e6c33a2 100644 --- a/testing/test_ldclient.py +++ b/testing/test_ldclient.py @@ -1,4 +1,3 @@ -from builtins import object from ldclient.client import LDClient, Config from ldclient.event_processor import NullEventProcessor from ldclient.feature_store import InMemoryFeatureStore diff --git a/testing/test_user_filter.py b/testing/test_user_filter.py index 15550541..e1711ffb 100644 --- a/testing/test_user_filter.py +++ b/testing/test_user_filter.py @@ -1,4 +1,3 @@ -from builtins import object import json from ldclient.client import Config from ldclient.user_filter import UserFilter From 6a45e700f1cc7e12b4ad44b95c1a3b05208dc15b Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 29 Jan 2019 12:49:19 -0800 Subject: [PATCH 048/190] update docs with note on portability --- CONTRIBUTING.md | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 88668de9..fe972301 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -8,7 +8,7 @@ Development information (for developing this module itself) 1. One-time setup: - mkvirtualenv python-client + mkvirtualenv python-client 1. When working on the project be sure to activate the python-client virtualenv using the technique of your choosing. @@ -17,11 +17,15 @@ Development information (for developing this module itself) pip install -r requirements.txt pip install -r test-requirements.txt -1. Run tests: You'll need redis running locally on its default port of 6379. +1. 
When running unit tests, in order for `test_feature_store.py` to run, you'll need all of the supported databases (Redis, Consul, DynamoDB) running locally on their default ports. + 1. If you want integration tests to run, set the ```LD_SDK_KEY``` environment variable to a valid production SDK Key. + 1. ```$ py.test testing``` -Developing with different python versions +1. All code must be compatible with all supported Python versions as described in README. Most portability issues are addressed by using the `six` package. We are avoiding the use of `__future__` imports, since they can easily be omitted by mistake causing code in one file to behave differently from another; instead, whenever possible, use an explicit approach that makes it clear what the desired behavior is in all Python versions (e.g. if you want to do floor division, use `//`; if you want to divide as floats, explicitly cast to floats). + +Developing with different Python versions ----------------------------------------- Example for switching to python 3: From 858e001970ea0011a4ae5b84bba70050331aff38 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 29 Jan 2019 12:50:09 -0800 Subject: [PATCH 049/190] typo --- CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index fe972301..af5083c2 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -28,6 +28,6 @@ Development information (for developing this module itself) Developing with different Python versions ----------------------------------------- -Example for switching to python 3: +Example for switching to Python 3: ```virtualenv -p `which python3` ~/.virtualenvs/python-client``` \ No newline at end of file From d4d4b8aa2b07e5328c43e90e3244e58a2006bdb6 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 30 Jan 2019 17:13:12 -0800 Subject: [PATCH 050/190] update package metadata prior to release --- setup.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index bf59d9a3..8a075cf8 100644 --- a/setup.py +++ b/setup.py @@ -19,12 +19,16 @@ def parse_requirements(filename): install_reqs = parse_requirements('requirements.txt') test_reqs = parse_requirements('test-requirements.txt') redis_reqs = parse_requirements('redis-requirements.txt') +consul_reqs = parse_requirements('consul-requirements.txt') +dynamodb_reqs = parse_requirements('dynamodb-requirements.txt') # reqs is a list of requirement # e.g. 
['django==1.5.1', 'mezzanine==1.4.6'] reqs = [ir for ir in install_reqs] testreqs = [ir for ir in test_reqs] redisreqs = [ir for ir in redis_reqs] +consulreqs = [ir for ir in consul_reqs] +dynamodbreqs = [ir for ir in dynamodb_reqs] class PyTest(Command): @@ -63,11 +67,14 @@ def run(self): 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', 'Topic :: Software Development', 'Topic :: Software Development :: Libraries', ], extras_require={ - "redis": redisreqs + "redis": redisreqs, + "consul": consulreqs, + "dynamodb": dynamodbreqs }, tests_require=testreqs, cmdclass={'test': PyTest}, From eaf677e28bcec07e21457bae8fd14b83a647ccdb Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 31 Jan 2019 11:22:39 -0800 Subject: [PATCH 051/190] add test for whether the package can be installed --- .circleci/config.yml | 5 ++++- .gitignore | 3 +-- scripts/test-packaging.sh | 13 +++++++++++++ 3 files changed, 18 insertions(+), 3 deletions(-) create mode 100755 scripts/test-packaging.sh diff --git a/.circleci/config.yml b/.circleci/config.yml index 714c5ee1..bb23c00d 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -15,7 +15,7 @@ test-template: &test-template - run: name: install requirements command: | - sudo pip install --upgrade pip setuptools; + sudo pip install --upgrade pip setuptools virtualenv; sudo pip install -r test-requirements.txt; if [[ "$CIRCLE_JOB" != "test-3.3" ]] && [[ "$CIRCLE_JOB" != "test-3.4" ]]; then sudo pip install -r consul-requirements.txt; @@ -32,6 +32,9 @@ test-template: &test-template else pytest -s --junitxml=test-reports/junit.xml testing; fi + - run: + name: test packaging/install + command: ./scripts/test-packaging.sh - store_test_results: path: test-reports - store_artifacts: diff --git a/.gitignore b/.gitignore index d988c61f..c949312e 100644 --- a/.gitignore +++ b/.gitignore @@ -66,5 +66,4 @@ p2venv .idea *.iml .vagrant -ldd/py2 -ldd/py3 +test-packaging-venv diff --git a/scripts/test-packaging.sh b/scripts/test-packaging.sh new file mode 100755 index 00000000..7ce81fa9 --- /dev/null +++ b/scripts/test-packaging.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +set -e + +rm -r dist +python setup.py sdist + +VENV=`pwd`/test-packaging-venv +rm -rf $VENV +virtualenv $VENV +source $VENV/bin/activate + +pip install dist/*.tar.gz From eae17f640fa8775e88b295d1fbec021bc0305e15 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 31 Jan 2019 11:46:33 -0800 Subject: [PATCH 052/190] fix build --- .circleci/config.yml | 8 +++++--- scripts/test-packaging.sh | 13 +++++++------ 2 files changed, 12 insertions(+), 9 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index bb23c00d..6c61b0b8 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -16,11 +16,13 @@ test-template: &test-template name: install requirements command: | sudo pip install --upgrade pip setuptools virtualenv; - sudo pip install -r test-requirements.txt; + virtualenv env; + source ./env/bin/activate; + pip install -r test-requirements.txt; if [[ "$CIRCLE_JOB" != "test-3.3" ]] && [[ "$CIRCLE_JOB" != "test-3.4" ]]; then - sudo pip install -r consul-requirements.txt; + pip install -r consul-requirements.txt; fi; - sudo python setup.py install; + python setup.py install; pip freeze - run: name: run tests diff --git a/scripts/test-packaging.sh b/scripts/test-packaging.sh index 7ce81fa9..8fcb7e3a 100755 --- a/scripts/test-packaging.sh +++ b/scripts/test-packaging.sh @@ -2,12 +2,13 
@@ set -e -rm -r dist +rm -rf dist python setup.py sdist -VENV=`pwd`/test-packaging-venv -rm -rf $VENV -virtualenv $VENV -source $VENV/bin/activate +rm -rf test-packaging +mkdir test-packaging +cd test-packaging +virtualenv env +source env/bin/activate -pip install dist/*.tar.gz +pip install ../dist/*.tar.gz From 0892c971f40f0a240a695cccebed5e4ab43434c3 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 31 Jan 2019 11:48:57 -0800 Subject: [PATCH 053/190] fix build --- .circleci/config.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 6c61b0b8..0a6121c2 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -27,6 +27,7 @@ test-template: &test-template - run: name: run tests command: | + source ./env/bin/activate; mkdir test-reports; if [[ "$CIRCLE_JOB" == "test-2.7" ]]; then pytest -s --cov=ldclient --junitxml=test-reports/junit.xml testing; From e21aa659a0215fdc7e7621b0445cf1b1c8351845 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 31 Jan 2019 11:54:57 -0800 Subject: [PATCH 054/190] fix manifest --- MANIFEST.in | 2 ++ 1 file changed, 2 insertions(+) diff --git a/MANIFEST.in b/MANIFEST.in index 4ec6f0b0..1a398256 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,5 +1,7 @@ include requirements.txt include README.txt include test-requirements.txt +include consul-requirements.txt +include dynamodb-requirements.txt include redis-requirements.txt include python2.6-requirements.txt \ No newline at end of file From 0520a9bffcad45a334d957acf23de6f4ea21ef2d Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 31 Jan 2019 11:58:25 -0800 Subject: [PATCH 055/190] skip test on 3.3 --- .circleci/config.yml | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 0a6121c2..5b15a1b7 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -36,8 +36,12 @@ test-template: &test-template pytest -s --junitxml=test-reports/junit.xml testing; fi - run: - name: test packaging/install - command: ./scripts/test-packaging.sh + name: test packaging/install # this can't be run on 3.3 because the "wheel" package isn't available; + # that's OK because we never build our actual published package on 3.3 + command: | + if [[ "$CIRCLE_JOB" != "test-3.3" ]]; then + ./scripts/test-packaging.sh; + fi - store_test_results: path: test-reports - store_artifacts: From 9deef2d8eb323fb7767d753e78e26941406bde10 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 31 Jan 2019 12:14:30 -0800 Subject: [PATCH 056/190] misc fixes for 3.3 --- .circleci/config.yml | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 5b15a1b7..fb3b8904 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -15,9 +15,10 @@ test-template: &test-template - run: name: install requirements command: | - sudo pip install --upgrade pip setuptools virtualenv; + sudo pip install --upgrade pip virtualenv; virtualenv env; source ./env/bin/activate; + pip install setuptools; pip install -r test-requirements.txt; if [[ "$CIRCLE_JOB" != "test-3.3" ]] && [[ "$CIRCLE_JOB" != "test-3.4" ]]; then pip install -r consul-requirements.txt; @@ -36,12 +37,8 @@ test-template: &test-template pytest -s --junitxml=test-reports/junit.xml testing; fi - run: - name: test packaging/install # this can't be run on 3.3 because the "wheel" package isn't available; - # that's OK because we never build our actual published package on 3.3 - command: | - if [[ "$CIRCLE_JOB" 
!= "test-3.3" ]]; then - ./scripts/test-packaging.sh; - fi + name: test packaging/install + command: ./scripts/test-packaging.sh - store_test_results: path: test-reports - store_artifacts: From e44abfac3e6f108b7041a594baedc03069b620ee Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 31 Jan 2019 12:21:13 -0800 Subject: [PATCH 057/190] another 3.3 fix --- .circleci/config.yml | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index fb3b8904..a7e1d810 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -16,14 +16,11 @@ test-template: &test-template name: install requirements command: | sudo pip install --upgrade pip virtualenv; - virtualenv env; - source ./env/bin/activate; - pip install setuptools; - pip install -r test-requirements.txt; + sudo pip install -r test-requirements.txt; if [[ "$CIRCLE_JOB" != "test-3.3" ]] && [[ "$CIRCLE_JOB" != "test-3.4" ]]; then - pip install -r consul-requirements.txt; + sudo pip install -r consul-requirements.txt; fi; - python setup.py install; + sudo python setup.py install; pip freeze - run: name: run tests @@ -38,7 +35,13 @@ test-template: &test-template fi - run: name: test packaging/install - command: ./scripts/test-packaging.sh + # Note, virtualenv isn't supported on Python 3.3 and this test requires virtualenv. But we + # never build our published package on 3.3 anyway. + command: | + if [[ "$CIRCLE_JOB" == "test-3.3" ]]; then + sudo rm -rf dist *.egg-info; + ./scripts/test-packaging.sh; + fi - store_test_results: path: test-reports - store_artifacts: From 273219aef197d4515f7d25d4ca8e308ac23feede Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 31 Jan 2019 12:22:23 -0800 Subject: [PATCH 058/190] misc fixes --- .circleci/config.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index a7e1d810..efc3d5ed 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -25,7 +25,6 @@ test-template: &test-template - run: name: run tests command: | - source ./env/bin/activate; mkdir test-reports; if [[ "$CIRCLE_JOB" == "test-2.7" ]]; then pytest -s --cov=ldclient --junitxml=test-reports/junit.xml testing; From b3944f9769b1c7e55efd00659676229a11792068 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 31 Jan 2019 12:23:55 -0800 Subject: [PATCH 059/190] misc fixes --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index efc3d5ed..ec3070fc 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -37,7 +37,7 @@ test-template: &test-template # Note, virtualenv isn't supported on Python 3.3 and this test requires virtualenv. But we # never build our published package on 3.3 anyway. 
command: | - if [[ "$CIRCLE_JOB" == "test-3.3" ]]; then + if [[ "$CIRCLE_JOB" != "test-3.3" ]]; then sudo rm -rf dist *.egg-info; ./scripts/test-packaging.sh; fi From b4792e6870a736f04531f04dd1941bd3f40ca224 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 31 Jan 2019 14:53:06 -0800 Subject: [PATCH 060/190] add test for importing the built package --- .circleci/config.yml | 2 +- {scripts => test-packaging}/test-packaging.sh | 5 +++-- test-packaging/test.py | 3 +++ 3 files changed, 7 insertions(+), 3 deletions(-) rename {scripts => test-packaging}/test-packaging.sh (76%) create mode 100644 test-packaging/test.py diff --git a/.circleci/config.yml b/.circleci/config.yml index ec3070fc..603bbf54 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -39,7 +39,7 @@ test-template: &test-template command: | if [[ "$CIRCLE_JOB" != "test-3.3" ]]; then sudo rm -rf dist *.egg-info; - ./scripts/test-packaging.sh; + ./test-packaging/test-packaging.sh; fi - store_test_results: path: test-reports diff --git a/scripts/test-packaging.sh b/test-packaging/test-packaging.sh similarity index 76% rename from scripts/test-packaging.sh rename to test-packaging/test-packaging.sh index 8fcb7e3a..50a40364 100755 --- a/scripts/test-packaging.sh +++ b/test-packaging/test-packaging.sh @@ -5,10 +5,11 @@ set -e rm -rf dist python setup.py sdist -rm -rf test-packaging -mkdir test-packaging cd test-packaging +rm -rf env virtualenv env source env/bin/activate pip install ../dist/*.tar.gz + +python test.py diff --git a/test-packaging/test.py b/test-packaging/test.py new file mode 100644 index 00000000..5337cb2e --- /dev/null +++ b/test-packaging/test.py @@ -0,0 +1,3 @@ +import ldclient + +print("Successfully installed and imported ldclient") From 7c2b501794eb4c992314d99d7fe57855c05dafc2 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 31 Jan 2019 14:59:39 -0800 Subject: [PATCH 061/190] add submodule imports --- test-packaging/test.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/test-packaging/test.py b/test-packaging/test.py index 5337cb2e..62f8b636 100644 --- a/test-packaging/test.py +++ b/test-packaging/test.py @@ -1,3 +1,5 @@ import ldclient +import ldclient.integrations +import ldclient.interfaces print("Successfully installed and imported ldclient") From a9d481ce7a5efca76db29f719787cf7be5fbfb21 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 31 Jan 2019 15:08:17 -0800 Subject: [PATCH 062/190] ensure that all packages are included in distribution --- setup.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/setup.py b/setup.py index 7d3c80cc..92d0c7d0 100644 --- a/setup.py +++ b/setup.py @@ -1,7 +1,4 @@ -try: - from setuptools import setup, Command -except ImportError: - from distutils.core import setup +from setuptools import setup, Command import sys import uuid @@ -51,7 +48,7 @@ def run(self): version=ldclient_version, author='LaunchDarkly', author_email='team@launchdarkly.com', - packages=['ldclient'], + packages=find_packages(), url='https://github.com/launchdarkly/python-client', description='LaunchDarkly SDK for Python', long_description='LaunchDarkly SDK for Python', From 71b821f969377d403635187b8941046e31e31156 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 31 Jan 2019 15:09:27 -0800 Subject: [PATCH 063/190] fix import --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 92d0c7d0..3c3a06b5 100644 --- a/setup.py +++ b/setup.py @@ -1,4 +1,4 @@ -from setuptools import setup, Command +from setuptools import 
find_packages, setup, Command import sys import uuid From 06fc3b25b502ac1e59efb71fc449443fa79b833a Mon Sep 17 00:00:00 2001 From: Harpo roeder Date: Tue, 5 Feb 2019 10:22:29 -0800 Subject: [PATCH 064/190] add basic pipeline and install deps --- azure-pipelines.yml | 50 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 50 insertions(+) create mode 100644 azure-pipelines.yml diff --git a/azure-pipelines.yml b/azure-pipelines.yml new file mode 100644 index 00000000..7910ee75 --- /dev/null +++ b/azure-pipelines.yml @@ -0,0 +1,50 @@ +jobs: + - job: build + pool: + vmImage: 'vs2017-win2016' + steps: + - task: PowerShell@2 + displayName: 'Setup Dynamo' + inputs: + targetType: inline + workingDirectory: $(System.DefaultWorkingDirectory) + script: | + iwr -outf dynamo.zip https://s3-us-west-2.amazonaws.com/dynamodb-local/dynamodb_local_latest.zip + mkdir dynamo + Expand-Archive -Path dynamo.zip -DestinationPath dynamo + cd dynamo + javaw -D"java.library.path=./DynamoDBLocal_lib" -jar DynamoDBLocal.jar + - task: PowerShell@2 + displayName: 'Setup Consul' + inputs: + targetType: inline + workingDirectory: $(System.DefaultWorkingDirectory) + script: | + iwr -outf consul.zip https://releases.hashicorp.com/consul/1.4.2/consul_1.4.2_windows_amd64.zip + mkdir consul + Expand-Archive -Path consul.zip -DestinationPath consul + cd consul + sc.exe create "Consul" binPath="$(System.DefaultWorkingDirectory)/consul/consul.exe agent -dev" + sc.exe start "Consul" + - task: PowerShell@2 + displayName: 'Setup Redis' + inputs: + targetType: inline + workingDirectory: $(System.DefaultWorkingDirectory) + script: | + iwr -outf redis.zip https://github.com/MicrosoftArchive/redis/releases/download/win-3.0.504/Redis-x64-3.0.504.zip + mkdir redis + Expand-Archive -Path redis.zip -DestinationPath redis + cd redis + ./redis-server --service-install + ./redis-server --service-start + - task: PowerShell@2 + displayName: 'Setup SDK and Test' + inputs: + targetType: inline + workingDirectory: $(System.DefaultWorkingDirectory) + script: | + python --version + pip install -r test-requirements.txt + pip install -r consul-requirements.txt + python setup.py install From 0165540c649c9e1ac5805fef4a011bc49435a480 Mon Sep 17 00:00:00 2001 From: Harpo roeder Date: Tue, 5 Feb 2019 10:30:02 -0800 Subject: [PATCH 065/190] add pytest --- azure-pipelines.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 7910ee75..b7f19ff3 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -48,3 +48,5 @@ jobs: pip install -r test-requirements.txt pip install -r consul-requirements.txt python setup.py install + mkdir test-reports + pytest -s --junitxml=test-reports/junit.xml testing; From 7c9f4e2765edd5d6c448779bcc4bfc541ba0d49a Mon Sep 17 00:00:00 2001 From: Harpo roeder Date: Tue, 5 Feb 2019 10:44:17 -0800 Subject: [PATCH 066/190] remove explicit install of deps --- azure-pipelines.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index b7f19ff3..c83d3c60 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -45,8 +45,6 @@ jobs: workingDirectory: $(System.DefaultWorkingDirectory) script: | python --version - pip install -r test-requirements.txt - pip install -r consul-requirements.txt python setup.py install mkdir test-reports pytest -s --junitxml=test-reports/junit.xml testing; From a38b9578dadc603c5254f705b2ce86f453465f63 Mon Sep 17 00:00:00 2001 From: Harpo roeder Date: Tue, 5 Feb 2019 10:51:33 -0800 Subject: [PATCH 067/190] add 
other db deps --- azure-pipelines.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index c83d3c60..e4f8f7af 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -45,6 +45,10 @@ jobs: workingDirectory: $(System.DefaultWorkingDirectory) script: | python --version + pip install -r test-requirements.txt + pip install -r redis-requirements.txt + pip install -r consul-requirements.txt + pip install -r dynamodb-requirements.txt python setup.py install mkdir test-reports pytest -s --junitxml=test-reports/junit.xml testing; From 0b6d28f73200314563fcef5322d3d4d315526a3d Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 5 Feb 2019 12:25:17 -0800 Subject: [PATCH 068/190] major cleanup of doc comments, add Sphinx build script --- docs/Makefile | 19 +++ docs/conf.py | 174 +++++++++++++++++++++++++++ docs/index.rst | 21 ++++ docs/ldclient.rst | 83 +++++++++++++ ldclient/__init__.py | 54 +++++++-- ldclient/client.py | 90 +++++++++----- ldclient/config.py | 32 ++++- ldclient/event_processor.py | 5 + ldclient/event_summarizer.py | 5 + ldclient/feature_requester.py | 5 + ldclient/feature_store.py | 48 +++++++- ldclient/feature_store_helpers.py | 32 ++++- ldclient/file_data_source.py | 34 ++---- ldclient/fixed_thread_pool.py | 5 + ldclient/flag.py | 49 +++++--- ldclient/flags_state.py | 26 +++- ldclient/integrations.py | 193 ++++++++++++------------------ ldclient/interfaces.py | 45 ++++--- ldclient/lru_cache.py | 15 ++- ldclient/memoized_value.py | 13 +- ldclient/operators.py | 5 + ldclient/polling.py | 5 + ldclient/redis_feature_store.py | 7 +- ldclient/repeating_timer.py | 5 + ldclient/rwlock.py | 5 + ldclient/sse_client.py | 9 +- ldclient/streaming.py | 5 + ldclient/user_filter.py | 5 + ldclient/util.py | 5 + ldclient/versioned_data_kind.py | 19 ++- 30 files changed, 763 insertions(+), 255 deletions(-) create mode 100644 docs/Makefile create mode 100644 docs/conf.py create mode 100644 docs/index.rst create mode 100644 docs/ldclient.rst diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 00000000..13edc19b --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,19 @@ +# Minimal makefile for Sphinx documentation +# + +.PHONY: help install html + +SPHINXOPTS = +SPHINXBUILD = sphinx-build +SPHINXPROJ = ldclient-py +SOURCEDIR = . +BUILDDIR = build + +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +install: + pip install sphinx + +html: install + @$(SPHINXBUILD) -M html "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 00000000..479f3bc8 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,174 @@ +# -*- coding: utf-8 -*- +# +# Configuration file for the Sphinx documentation builder. +# +# This file does only contain a selection of the most common options. For a +# full list see the documentation: +# http://www.sphinx-doc.org/en/master/config + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. 
+# +# import os +# import sys +# sys.path.insert(0, os.path.abspath('.')) + +import os +import sys + +sys.path.insert(0, os.path.abspath('..')) + +import ldclient + +# -- Project information ----------------------------------------------------- + +project = u'ldclient-py' +copyright = u'2019, LaunchDarkly' +author = u'LaunchDarkly' + +# The short X.Y version. +version = ldclient.__version__ +# The full version, including alpha/beta/rc tags. +release = ldclient.__version__ + + +# -- General configuration --------------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. +# +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.coverage', + 'sphinx.ext.viewcode', +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The master toctree document. +master_doc = 'index' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path . +exclude_patterns = ['build'] + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'alabaster' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# +# html_theme_options = {} + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Custom sidebar templates, must be a dictionary that maps document names +# to template names. +# +# The default sidebars (for documents that don't match any pattern) are +# defined by theme itself. Builtin themes are using these templates by +# default: ``['localtoc.html', 'relations.html', 'sourcelink.html', +# 'searchbox.html']``. +# +# html_sidebars = {} + + +# -- Options for HTMLHelp output --------------------------------------------- + +# Output file base name for HTML help builder. +htmlhelp_basename = 'ldclient-pydoc' + + +# -- Options for LaTeX output ------------------------------------------------ + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. 
List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'ldclient-py.tex', u'ldclient-py Documentation', + u'LaunchDarkly', 'manual'), +] + + +# -- Options for manual page output ------------------------------------------ + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, 'ldclient-py', u'ldclient-py Documentation', + [author], 1) +] + + +# -- Options for Texinfo output ---------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'ldclient-py', u'ldclient-py Documentation', + author, 'ldclient-py', 'One line description of project.', + 'Miscellaneous'), +] + + +# -- Extension configuration ------------------------------------------------- + +autodoc_default_options = { + 'members': None, + 'show-inheritance': None, + 'special-members': None, + 'undoc-members': None +} diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 00000000..735da978 --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,21 @@ +.. ldclient-py documentation master file, created by + sphinx-quickstart on Mon Feb 4 13:16:49 2019. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to ldclient-py's documentation! +======================================= + +.. toctree:: + :maxdepth: 2 + :caption: Contents: + +.. automodule:: ldclient + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/docs/ldclient.rst b/docs/ldclient.rst new file mode 100644 index 00000000..4a212c16 --- /dev/null +++ b/docs/ldclient.rst @@ -0,0 +1,83 @@ +ldclient package +================ + +Module contents +--------------- + +.. automodule:: ldclient + :members: + :undoc-members: + :show-inheritance: + +Submodules +---------- + +ldclient.client module +---------------------- + +.. automodule:: ldclient.client + :members: LDClient + :special-members: __init__ + :show-inheritance: + +ldclient.config module +---------------------- + +.. automodule:: ldclient.config + :members: + :special-members: __init__ + :show-inheritance: + +ldclient.feature\_store module +------------------------------ + +.. automodule:: ldclient.feature_store + :members: + :special-members: __init__ + :show-inheritance: + +ldclient.feature\_store\_helpers module +--------------------------------------- + +.. automodule:: ldclient.feature_store_helpers + :members: + :special-members: __init__ + :show-inheritance: + +ldclient.flag module +-------------------- + +.. automodule:: ldclient.flag + :members: EvaluationDetail + :special-members: __init__ + :show-inheritance: + +ldclient.flags\_state module +---------------------------- + +.. automodule:: ldclient.flags_state + :members: + :show-inheritance: + +ldclient.integrations module +---------------------------- + +.. automodule:: ldclient.integrations + :members: + :special-members: __init__ + :show-inheritance: + +ldclient.interfaces module +-------------------------- + +.. automodule:: ldclient.interfaces + :members: + :special-members: __init__ + :show-inheritance: + +ldclient.versioned\_data\_kind module +------------------------------------- + +.. 
automodule:: ldclient.versioned_data_kind + :members: + :show-inheritance: diff --git a/ldclient/__init__.py b/ldclient/__init__.py index f693d989..5be96db9 100644 --- a/ldclient/__init__.py +++ b/ldclient/__init__.py @@ -1,3 +1,7 @@ +""" +The ldclient module contains the most common top-level entry points for the SDK. +""" + import logging from ldclient.rwlock import ReadWriteLock @@ -20,12 +24,16 @@ __lock = ReadWriteLock() -# 2 Use Cases: -# 1. Initial setup: sets the config for the uninitialized client -# 2. Allows on-the-fly changing of the config. When this function is called after the client has been initialized -# the client will get re-initialized with the new config. In order for this to work, the return value of -# ldclient.get() should never be assigned def set_config(config): + """Sets the configuration for the shared SDK client instance. + + If this is called prior to :func:`ldclient.get()`, it stores the configuration that will be used when the + client is initialized. If it is called after the client has already been initialized, the client will be + re-initialized with the new configuration (this will result in the next call to :func:`ldclient.get()` + returning a new client instance). + + :param string sdk_key: the new SDK key + """ global __config global __client global __lock @@ -42,12 +50,18 @@ def set_config(config): __lock.unlock() -# 2 Use Cases: -# 1. Initial setup: sets the sdk key for the uninitialized client -# 2. Allows on-the-fly changing of the sdk key. When this function is called after the client has been initialized -# the client will get re-initialized with the new sdk key. In order for this to work, the return value of -# ldclient.get() should never be assigned def set_sdk_key(sdk_key): + """Sets the SDK key for the shared SDK client instance. + + If this is called prior to :func:`ldclient.get()`, it stores the SDK key that will be used when the client is + initialized. If it is called after the client has already been initialized, the client will be + re-initialized with the new SDK key (this will result in the next call to :func:`ldclient.get()` returning a + new client instance). + + If you need to set any configuration options other than the SDK key, use :func:`ldclient.set_config()` instead. + + :param string sdk_key: the new SDK key + """ global __config global __client global __lock @@ -76,6 +90,18 @@ def set_sdk_key(sdk_key): def get(): + """Returns the shared SDK client instance, using the current global configuration. + + To use the SDK as a singleton, first make sure you have called :func:`ldclient.set_sdk_key()` or + :func:`ldclient.set_config()` at startup time. Then `get()` will return the same shared + :class:`ldclient.client.LDClient` instance each time. The client will be initialized if it has + not been already. + + If you need to create multiple client instances with different configurations, instead of this + singleton approach you can call the :class:`ldclient.client.LDClient` constructor directly instead. + + :rtype: ldclient.client.LDClient + """ global __config global __client global __lock @@ -96,8 +122,14 @@ def get(): __lock.unlock() -# Add a NullHandler for Python < 2.7 compatibility class NullHandler(logging.Handler): + """A :class:`logging.Handler` implementation that does nothing. + + .. deprecated:: 6.0.0 + You should not need to use this class. It was originally used in order to support Python 2.6, + which requires that at least one logging handler must always be configured. 
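The singleton flow that these docstrings describe is compact enough to sketch; the SDK key, flag key, and user below are placeholders.

```
# Minimal sketch of the singleton usage documented above.
import ldclient
from ldclient.config import Config

ldclient.set_config(Config(sdk_key="YOUR_SDK_KEY"))
client = ldclient.get()   # initialized on first call; same instance afterward
value = client.variation("my-flag-key", {"key": "user-123"}, False)
```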
However, the SDK + no longer supports Python 2.6. + """ def emit(self, record): pass diff --git a/ldclient/client.py b/ldclient/client.py index ff96475b..61be996e 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -1,3 +1,7 @@ +""" +This submodule contains the client class that provides most of the SDK functionality. +""" + import hashlib import hmac import threading @@ -55,6 +59,15 @@ def initialized(self): class LDClient(object): + """The LaunchDarkly SDK client object. + + Applications should configure the client at startup time and continue to use it throughout the lifetime + of the application, rather than creating instances on the fly. The best way to do this is with the + singleton methods :func:`ldclient.set_sdk_key()`, :func:`ldclient.set_config()`, and :func:`ldclient.get()`. + However, you may also call the constructor directly if you need to maintain multiple instances. + + Client instances are thread-safe. + """ def __init__(self, sdk_key=None, config=None, start_wait=5): """Constructs a new LDClient instance. @@ -63,7 +76,7 @@ def __init__(self, sdk_key=None, config=None, start_wait=5): client instance. :param string sdk_key: the SDK key for your LaunchDarkly environment - :param Config config: optional custom configuration + :param ldclient.config.Config config: optional custom configuration :param float start_wait: the number of seconds to wait for a successful connection to LaunchDarkly """ check_uwsgi() @@ -157,9 +170,9 @@ def _send_event(self, event): def track(self, event_name, user, data=None): """Tracks that a user performed an event. - :param string event_name: The name of the event. - :param dict user: The attributes of the user. - :param data: Optional additional data associated with the event. + :param string event_name: the name of the event, which may correspond to a goal in A/B tests + :param dict user: the attributes of the user + :param data: optional additional data associated with the event """ self._sanitize_user(user) if user is None or user.get('key') is None: @@ -169,6 +182,10 @@ def track(self, event_name, user, data=None): def identify(self, user): """Registers the user. + This simply creates an analytics event that will transmit the given user properties to + LaunchDarkly, so that the user will be visible on your dashboard even if you have not + evaluated any flags for that user. It has no other effect. + :param dict user: attributes of the user to register """ self._sanitize_user(user) @@ -192,13 +209,20 @@ def is_initialized(self): def flush(self): """Flushes all pending events. + + Normally, batches of events are delivered in the background at intervals determined by the + ``flush_interval`` property of :class:`ldclient.config.Config`. Calling ``flush()`` + schedules the next event delivery to be as soon as possible; however, the delivery still + happens asynchronously on a worker thread, so this method will return immediately. """ if self._config.offline: return return self._event_processor.flush() def toggle(self, key, user, default): - """Deprecated synonym for `variation`. + """Deprecated synonym for :func:`variation()`. + + .. deprecated:: 2.0.0 """ log.warn("Deprecated method: toggle() called. 
Use variation() instead.") return self.variation(key, user, default) @@ -215,27 +239,18 @@ def variation(self, key, user, default): return self._evaluate_internal(key, user, default, False).value def variation_detail(self, key, user, default): - """Determines the variation of a feature flag for a user, like `variation`, but also - provides additional information about how this value was calculated. - - The return value is an EvaluationDetail object, which has three properties: - - `value`: the value that was calculated for this user (same as the return value - of `variation`) - - `variation_index`: the positional index of this value in the flag, e.g. 0 for the - first variation - or `None` if the default value was returned - - `reason`: a hash describing the main reason why this value was selected. + """Determines the variation of a feature flag for a user, like :func:`variation()`, but also + provides additional information about how this value was calculated, in the form of an + :class:`ldclient.flag.EvaluationDetail` object. - The `reason` will also be included in analytics events, if you are capturing - detailed event data for this flag. + Calling this method also causes the "reason" data to be included in analytics events, + if you are capturing detailed event data for this flag. :param string key: the unique key for the feature flag :param dict user: a dictionary containing parameters for the end user requesting the flag :param object default: the default value of the flag, to be used if the value is not available from LaunchDarkly - :return: an EvaluationDetail object describing the result + :return: an object describing the result :rtype: EvaluationDetail """ return self._evaluate_internal(key, user, default, True) @@ -307,8 +322,8 @@ def send_event(value, variation=None, flag=None, reason=None): def all_flags(self, user): """Returns all feature flag values for the given user. - This method is deprecated - please use `all_flags_state` instead. Current versions of the - client-side SDK will not generate analytics events correctly if you pass the result of `all_flags`. + This method is deprecated - please use :func:`all_flags_state()` instead. Current versions of the + client-side SDK will not generate analytics events correctly if you pass the result of ``all_flags``. :param dict user: the end user requesting the feature flags :return: a dictionary of feature flag keys to values; returns None if the client is offline, @@ -322,19 +337,27 @@ def all_flags(self, user): def all_flags_state(self, user, **kwargs): """Returns an object that encapsulates the state of all feature flags for a given user, - including the flag values and also metadata that can be used on the front end. + including the flag values and also metadata that can be used on the front end. See the + JavaScript SDK Reference Guide on + `Bootstrapping `_. This method does not send analytics events back to LaunchDarkly. 
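As a usage note for the `variation_detail()` docstring above, a short sketch of reading the result object; the flag key and user are placeholders.

```
# Reading an EvaluationDetail, per the variation_detail() docstring above.
import ldclient

client = ldclient.get()   # assumes set_sdk_key/set_config was called at startup
detail = client.variation_detail("my-flag-key", {"key": "user-123"}, False)
if detail.is_default_value():
    print("fell back to the default value")
else:
    print(detail.value, detail.variation_index, detail.reason)
```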
:param dict user: the end user requesting the feature flags - :param kwargs: optional parameters affecting how the state is computed: set - `client_side_only=True` to limit it to only flags that are marked for use with the - client-side SDK (by default, all flags are included); set `with_reasons=True` to - include evaluation reasons in the state (see `variation_detail`); set - `details_only_for_tracked_flags=True` to omit any metadata that is normally only - used for event generation, such as flag versions and evaluation reasons, unless - the flag has event tracking or debugging turned on - :return: a FeatureFlagsState object (will never be None; its 'valid' property will be False + :param kwargs: optional parameters affecting how the state is computed - see below + + :Keyword Arguments: + * **client_side_only** (*boolean*) -- + set to True to limit it to only flags that are marked for use with the client-side SDK + (by default, all flags are included) + * **with_reasons** (*boolean*) -- + set to True to include evaluation reasons in the state (see :func:`variation_detail()`) + * **details_only_for_tracked_flags** (*boolean*) -- + set to True to omit any metadata that is normally only used for event generation, such + as flag versions and evaluation reasons, unless the flag has event tracking or debugging + turned on + + :return: a FeatureFlagsState object (will never be None; its ``valid`` property will be False if the client is offline, has not been initialized, or the user is None or has no key) :rtype: FeatureFlagsState """ @@ -381,9 +404,10 @@ def all_flags_state(self, user, **kwargs): return state def secure_mode_hash(self, user): - """Generates a hash value for a user. + """Generates a hash value for a user, for use by the JavaScript SDK. - For more info: https://github.com/launchdarkly/js-client#secure-mode + For more information, see the JavaScript SDK Reference Guide on + `Secure mode `_. :param dict user: the attributes of the user :return: a hash string that can be passed to the front end diff --git a/ldclient/config.py b/ldclient/config.py index 35af5110..2d99c72e 100644 --- a/ldclient/config.py +++ b/ldclient/config.py @@ -1,3 +1,9 @@ +""" +This submodule contains the :class:`Config` class for custom configuration of the SDK client. + +Note that the same class can also be imported from the ``ldclient.client`` submodule. +""" + from ldclient.event_processor import DefaultEventProcessor from ldclient.feature_store import InMemoryFeatureStore from ldclient.util import log @@ -7,6 +13,11 @@ class Config(object): + """Advanced configuration options for the SDK client. + + To use these options, create an instance of ``Config`` and pass it to either :func:`ldclient.set_config()` + if you are using the singleton client, or the :class:`ldclient.client.LDClient` constructor otherwise. + """ def __init__(self, sdk_key=None, base_uri='https://app.launchdarkly.com', @@ -59,7 +70,7 @@ def __init__(self, :param bool offline: Whether the client should be initialized in offline mode. In offline mode, default values are returned for all flags and no remote network requests are made. By default, this is false. - :type update_processor_class: (str, Config, FeatureStore) -> UpdateProcessor + :type update_processor_class: (str, ldclient.config.Config, FeatureStore) -> UpdateProcessor :param float poll_interval: The number of seconds between polls for flag updates if streaming is off. :param bool use_ldd: Whether you are using the LaunchDarkly relay proxy in daemon mode. 
In this configuration, the client will not use a streaming connection to listen for updates, but instead @@ -79,9 +90,9 @@ def __init__(self, By default, events will only include the user key, except for one "index" event that provides the full details for the user. :param feature_requester_class: A factory for a FeatureRequester implementation taking the sdk key and config - :type feature_requester_class: (str, Config, FeatureStore) -> FeatureRequester + :type feature_requester_class: (str, ldclient.config.Config, FeatureStore) -> FeatureRequester :param event_processor_class: A factory for an EventProcessor implementation taking the config - :type event_processor_class: (Config) -> EventProcessor + :type event_processor_class: (ldclient.config.Config) -> EventProcessor :param update_processor_class: A factory for an UpdateProcessor implementation taking the sdk key, config, and FeatureStore implementation """ @@ -118,9 +129,18 @@ def __init__(self, @classmethod def default(cls): + """Returns a ``Config`` instance with default values for all properties. + + :rtype: ldclient.config.Config + """ return cls() def copy_with_new_sdk_key(self, new_sdk_key): + """Returns a new ``Config`` instance that is the same as this one, except for having a different SDK key. + + :param string new_sdk_key: the new SDK key + :rtype: ldclient.config.Config + """ return Config(sdk_key=new_sdk_key, base_uri=self.__base_uri, events_uri=self.__events_uri, @@ -147,6 +167,8 @@ def copy_with_new_sdk_key(self, new_sdk_key): inline_users_in_events=self.__inline_users_in_events) def get_default(self, key, default): + """Used internally by the SDK client to get the default value for a flag. + """ return default if key not in self.__defaults else self.__defaults[key] @property @@ -159,6 +181,10 @@ def base_uri(self): @property def get_latest_flags_uri(self): + """Used internally, deprecated. + + .. deprecated:: 5.0.0 + """ return self.__base_uri + GET_LATEST_FEATURES_PATH @property diff --git a/ldclient/event_processor.py b/ldclient/event_processor.py index 9a0cae83..03134b64 100644 --- a/ldclient/event_processor.py +++ b/ldclient/event_processor.py @@ -1,3 +1,8 @@ +""" +Implementation details of the analytics event delivery component. +""" +# currently excluded from documentation - see ldclient.rst + from collections import namedtuple from email.utils import parsedate import errno diff --git a/ldclient/event_summarizer.py b/ldclient/event_summarizer.py index 5a9f19ea..2d084ddc 100644 --- a/ldclient/event_summarizer.py +++ b/ldclient/event_summarizer.py @@ -1,3 +1,8 @@ +""" +Implementation details of the analytics event delivery component. +""" +# currently excluded from documentation - see ldclient.rst + from collections import namedtuple diff --git a/ldclient/feature_requester.py b/ldclient/feature_requester.py index 046c594f..4414fb7a 100644 --- a/ldclient/feature_requester.py +++ b/ldclient/feature_requester.py @@ -1,3 +1,8 @@ +""" +Default implementation of feature flag polling requests. +""" +# currently excluded from documentation - see ldclient.rst + from collections import namedtuple import json import urllib3 diff --git a/ldclient/feature_store.py b/ldclient/feature_store.py index fccef5b5..efabe82e 100644 --- a/ldclient/feature_store.py +++ b/ldclient/feature_store.py @@ -1,3 +1,11 @@ +""" +This submodule contains basic classes related to the feature store. + +The feature store is the SDK component that holds the last known state of all feature flags, as +received from LaunchDarkly. 
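To make the `Config` helpers documented above concrete, a brief sketch; the key strings are placeholders.

```
# Config.default() and copy_with_new_sdk_key(), as documented above.
from ldclient.config import Config

base = Config(sdk_key="primary-sdk-key")
staging = base.copy_with_new_sdk_key("staging-sdk-key")  # same options, new key
defaults = Config.default()                              # all default values
```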
This submodule does not include specific integrations with external +storage systems; those are in :class:`ldclient.integrations`. +""" + from collections import OrderedDict, defaultdict from ldclient.util import log from ldclient.interfaces import FeatureStore @@ -16,10 +24,11 @@ def __init__(self, expiration = DEFAULT_EXPIRATION, capacity = DEFAULT_CAPACITY): """Constructs an instance of CacheConfig. - :param float expiration: The cache TTL, in seconds. Items will be evicted from the cache after + + :param float expiration: the cache TTL, in seconds. Items will be evicted from the cache after this amount of time from the time when they were originally cached. If the time is less than or equal to zero, caching is disabled. - :param int capacity: The maximum number of items that can be in the cache at a time. + :param int capacity: the maximum number of items that can be in the cache at a time """ self._expiration = expiration self._capacity = capacity @@ -28,41 +37,58 @@ def __init__(self, def default(): """Returns an instance of CacheConfig with default properties. By default, caching is enabled. This is the same as calling the constructor with no parameters. - :rtype: CacheConfig + + :rtype: ldclient.feature_store.CacheConfig """ return CacheConfig() @staticmethod def disabled(): """Returns an instance of CacheConfig specifying that caching should be disabled. - :rtype: CacheConfig + + :rtype: ldclient.feature_store.CacheConfig """ return CacheConfig(expiration = 0) @property def enabled(self): + """Returns True if caching is enabled in this configuration. + + :rtype: bool + """ return self._expiration > 0 @property def expiration(self): + """Returns the configured cache TTL, in seconds. + + :rtype: float + """ return self._expiration @property def capacity(self): + """Returns the configured maximum number of cacheable items. + + :rtype: int + """ return self._capacity class InMemoryFeatureStore(FeatureStore): - """ - In-memory implementation of a store that holds feature flags and related data received from the streaming API. + """The default feature store implementation, which holds all data in a thread-safe data structure in memory. """ def __init__(self): + """Constructs an instance of InMemoryFeatureStore. 
+ """ self._lock = ReadWriteLock() self._initialized = False self._items = defaultdict(dict) def get(self, kind, key, callback): + """ + """ try: self._lock.rlock() itemsOfKind = self._items[kind] @@ -78,6 +104,8 @@ def get(self, kind, key, callback): self._lock.runlock() def all(self, kind, callback): + """ + """ try: self._lock.rlock() itemsOfKind = self._items[kind] @@ -86,6 +114,8 @@ def all(self, kind, callback): self._lock.runlock() def init(self, all_data): + """ + """ try: self._lock.rlock() self._items.clear() @@ -98,6 +128,8 @@ def init(self, all_data): # noinspection PyShadowingNames def delete(self, kind, key, version): + """ + """ try: self._lock.rlock() itemsOfKind = self._items[kind] @@ -109,6 +141,8 @@ def delete(self, kind, key, version): self._lock.runlock() def upsert(self, kind, item): + """ + """ key = item['key'] try: self._lock.rlock() @@ -122,6 +156,8 @@ def upsert(self, kind, item): @property def initialized(self): + """ + """ try: self._lock.rlock() return self._initialized diff --git a/ldclient/feature_store_helpers.py b/ldclient/feature_store_helpers.py index 2ba83713..58f9a848 100644 --- a/ldclient/feature_store_helpers.py +++ b/ldclient/feature_store_helpers.py @@ -1,18 +1,28 @@ +""" +This submodule contains support code for writing feature store implementations. +""" + from expiringdict import ExpiringDict from ldclient.interfaces import FeatureStore class CachingStoreWrapper(FeatureStore): - """CachingStoreWrapper is a partial implementation of :class:ldclient.interfaces.FeatureStore that - delegates the basic functionality to an implementation of :class:ldclient.interfaces.FeatureStoreCore - - while adding optional caching behavior and other logic that would otherwise be repeated in every - feature store implementation. This makes it easier to create new database integrations by implementing - only the database-specific logic. + """A partial implementation of :class:`ldclient.interfaces.FeatureStore`. + + This class delegates the basic functionality to an implementation of + :class:`ldclient.interfaces.FeatureStoreCore` - while adding optional caching behavior and other logic + that would otherwise be repeated in every feature store implementation. This makes it easier to create + new database integrations by implementing only the database-specific logic. """ __INITED_CACHE_KEY__ = "$inited" def __init__(self, core, cache_config): + """Constructs an instance by wrapping a core implementation object. 
+ + :param FeatureStoreCore core: the implementation object + :param ldclient.feature_store.CacheConfig cache_config: the caching parameters + """ self._core = core if cache_config.enabled: self._cache = ExpiringDict(max_len=cache_config.capacity, max_age_seconds=cache_config.expiration) @@ -21,6 +31,8 @@ def __init__(self, core, cache_config): self._inited = False def init(self, all_data): + """ + """ self._core.init_internal(all_data) if self._cache is not None: self._cache.clear() @@ -31,6 +43,8 @@ def init(self, all_data): self._inited = True def get(self, kind, key, callback=lambda x: x): + """ + """ if self._cache is not None: cache_key = self._item_cache_key(kind, key) cached_item = self._cache.get(cache_key) @@ -43,6 +57,8 @@ def get(self, kind, key, callback=lambda x: x): return callback(self._item_if_not_deleted(item)) def all(self, kind, callback=lambda x: x): + """ + """ if self._cache is not None: cache_key = self._all_cache_key(kind) cached_items = self._cache.get(cache_key) @@ -54,10 +70,14 @@ def all(self, kind, callback=lambda x: x): return callback(items) def delete(self, kind, key, version): + """ + """ deleted_item = { "key": key, "version": version, "deleted": True } self.upsert(kind, deleted_item) def upsert(self, kind, item): + """ + """ new_state = self._core.upsert_internal(kind, item) if self._cache is not None: self._cache[self._item_cache_key(kind, item.get('key'))] = [new_state] @@ -65,6 +85,8 @@ def upsert(self, kind, item): @property def initialized(self): + """ + """ if self._inited: return True if self._cache is None: diff --git a/ldclient/file_data_source.py b/ldclient/file_data_source.py index 61088d50..527acec5 100644 --- a/ldclient/file_data_source.py +++ b/ldclient/file_data_source.py @@ -1,31 +1,21 @@ +""" +Deprecated entry point for a component that has been moved. +""" +# currently excluded from documentation - see ldclient.rst + from ldclient.impl.integrations.files.file_data_source import _FileDataSource +from ldclient.interfaces import UpdateProcessor class FileDataSource(UpdateProcessor): @classmethod def factory(cls, **kwargs): - """Provides a way to use local files as a source of feature flag state. This would typically be - used in a test environment, to operate using a predetermined feature flag state without an - actual LaunchDarkly connection. - - This module and this implementation class are deprecated and may be changed or removed in the future. - Please use :func:`ldclient.integrations.Files.new_data_source()`. + """Provides a way to use local files as a source of feature flag state. - :param kwargs: - See below - - :Keyword arguments: - * **paths** (array): The paths of the source files for loading flag data. These may be absolute paths - or relative to the current working directory. Files will be parsed as JSON unless the 'pyyaml' - package is installed, in which case YAML is also allowed. - * **auto_update** (boolean): True if the data source should watch for changes to the source file(s) - and reload flags whenever there is a change. The default implementation of this feature is based on - polling the filesystem, which may not perform well; if you install the 'watchdog' package (not - included by default, to avoid adding unwanted dependencies to the SDK), its native file watching - mechanism will be used instead. Note that auto-updating will only work if all of the files you - specified have valid directory paths at startup time. 
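For the replacement API named in this deprecation notice, a hedged sketch: the kwargs mirror the deprecated factory's, and wiring the result in through `update_processor_class` is an assumption based on the factory's return type rather than something shown in this patch.

```
# Hedged sketch of Files.new_data_source(); the file path is a placeholder,
# and passing the result as update_processor_class is an assumption.
from ldclient.config import Config
from ldclient.integrations import Files

data_source = Files.new_data_source(paths=["./flags.json"], auto_update=True)
config = Config(update_processor_class=data_source)
```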
- * **poll_interval** (float): The minimum interval, in seconds, between checks for file modifications - - used only if auto_update is true, and if the native file-watching mechanism from 'watchdog' is not - being used. The default value is 1 second. + .. deprecated:: 6.8.0 + This module and this implementation class are deprecated and may be changed or removed in the future. + Please use :func:`ldclient.integrations.Files.new_data_source()`. + + The keyword arguments are the same as the arguments to :func:`ldclient.integrations.Files.new_data_source()`. """ return lambda config, store, ready : _FileDataSource(store, ready, diff --git a/ldclient/fixed_thread_pool.py b/ldclient/fixed_thread_pool.py index a3c769e4..17ded510 100644 --- a/ldclient/fixed_thread_pool.py +++ b/ldclient/fixed_thread_pool.py @@ -1,3 +1,8 @@ +""" +Internal helper class for thread management. +""" +# currently excluded from documentation - see ldclient.rst + from threading import Event, Lock, Thread # noinspection PyBroadException diff --git a/ldclient/flag.py b/ldclient/flag.py index d4fcbdf3..83986092 100644 --- a/ldclient/flag.py +++ b/ldclient/flag.py @@ -1,3 +1,7 @@ +""" +This submodule contains a helper class for feature flag evaluation, as well as some implementation details. +""" + from collections import namedtuple import hashlib import logging @@ -18,10 +22,12 @@ class EvaluationDetail(object): """ - The return type of LDClient.variation_detail, combining the result of a flag evaluation - with information about how it was calculated. + The return type of :func:`ldclient.client.LDClient.variation_detail()`, combining the result of a + flag evaluation with information about how it was calculated. """ def __init__(self, value, variation_index, reason): + """Constructs an instance. + """ self.__value = value self.__variation_index = variation_index self.__reason = reason @@ -29,14 +35,17 @@ def __init__(self, value, variation_index, reason): @property def value(self): """The result of the flag evaluation. This will be either one of the flag's - variations or the default value that was passed to the variation() method. + variations or the default value that was passed to the + :func:`ldclient.client.LDClient.variation_detail()` method. """ return self.__value @property def variation_index(self): """The index of the returned value within the flag's list of variations, e.g. - 0 for the first variation - or None if the default value was returned. + 0 for the first variation -- or None if the default value was returned. + + :rtype: int """ return self.__variation_index @@ -45,28 +54,34 @@ def reason(self): """A dictionary describing the main factor that influenced the flag evaluation value. It contains the following properties: - 'kind': The general category of reason, as follows: 'OFF' - the flag was off; - 'FALLTHROUGH' - the flag was on but the user did not match any targets or rules; - 'TARGET_MATCH' - the user was specifically targeted for this flag; 'RULE_MATCH' - - the user matched one of the flag's rules; 'PREREQUISITE_FAILED' - the flag was - considered off because it had at least one prerequisite flag that did not return - the desired variation; 'ERROR' - the flag could not be evaluated due to an - unexpected error. 
+ * ``kind``: The general category of reason, as follows: + + * ``"OFF"``: the flag was off + * ``"FALLTHROUGH"`` -- the flag was on but the user did not match any targets or rules + * ``"TARGET_MATCH"`` -- the user was specifically targeted for this flag + * ``"RULE_MATCH"`` -- the user matched one of the flag's rules + * ``"PREREQUISITE_FAILED"`` -- the flag was considered off because it had at least one + prerequisite flag that did not return the desired variation + * ``"ERROR"`` - the flag could not be evaluated due to an unexpected error. - 'ruleIndex', 'ruleId': The positional index and unique identifier of the matched - rule, if the kind was 'RULE_MATCH' + * ``ruleIndex``, ``ruleId``: The positional index and unique identifier of the matched + rule, if the kind was ``RULE_MATCH`` - 'prerequisiteKey': The flag key of the prerequisite that failed, if the kind was - 'PREREQUISITE_FAILED' + * ``prerequisiteKey``: The flag key of the prerequisite that failed, if the kind was + ``PREREQUISITE_FAILED`` - 'errorKind': further describes the nature of the error if the kind was 'ERROR', - e.g. 'FLAG_NOT_FOUND' + * ``errorKind``: further describes the nature of the error if the kind was ``ERROR``, + e.g. ``"FLAG_NOT_FOUND"`` + + :rtype: dict """ return self.__reason def is_default_value(self): """Returns True if the flag evaluated to the default value rather than one of its variations. + + :rtype: bool """ return self.__variation_index is None diff --git a/ldclient/flags_state.py b/ldclient/flags_state.py index c5a8ab41..a0ba668b 100644 --- a/ldclient/flags_state.py +++ b/ldclient/flags_state.py @@ -1,20 +1,25 @@ +""" +This submodule contains a helper class for feature flag evaluation. +""" + import json import time class FeatureFlagsState(object): """ A snapshot of the state of all feature flags with regard to a specific user, generated by - calling the client's all_flags_state method. Serializing this object to JSON, using the - to_json_dict method or jsonpickle, will produce the appropriate data structure for - bootstrapping the LaunchDarkly JavaScript client. + calling the :func:`ldclient.client.LDClient.all_flags_state()` method. Serializing this + object to JSON, using the :func:`to_json_dict` method or ``jsonpickle``, will produce the + appropriate data structure for bootstrapping the LaunchDarkly JavaScript client. See the + JavaScript SDK Reference Guide on `Bootstrapping `_. """ def __init__(self, valid): self.__flag_values = {} self.__flag_metadata = {} self.__valid = valid + # Used internally to build the state map def add_flag(self, flag, value, variation, reason, details_only_if_tracked): - """Used internally to build the state map.""" key = flag['key'] self.__flag_values[key] = value meta = {} @@ -39,11 +44,14 @@ def add_flag(self, flag, value, variation, reason, details_only_if_tracked): def valid(self): """True if this object contains a valid snapshot of feature flag state, or False if the state could not be computed (for instance, because the client was offline or there was no user). + + :rtype: bool """ return self.__valid def get_flag_value(self, key): """Returns the value of an individual feature flag at the time the state was recorded. + :param string key: the feature flag key :return: the flag's value; None if the flag returned the default value, or if there was no such flag """ @@ -51,9 +59,11 @@ def get_flag_value(self, key): def get_flag_reason(self, key): """Returns the evaluation reason for an individual feature flag at the time the state was recorded. 
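A sketch of the bootstrapping flow that the `FeatureFlagsState` docstring above describes; the user dict is a placeholder.

```
# Serializing flag state for the JavaScript SDK, per the docstring above.
import ldclient

client = ldclient.get()            # assumes prior set_sdk_key/set_config
user = {"key": "user-123"}         # placeholder user
state = client.all_flags_state(user, client_side_only=True)
if state.valid:
    bootstrap_json = state.to_json_string()  # embed this in the rendered page
```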
+ :param string key: the feature flag key :return: a dictionary describing the reason; None if reasons were not recorded, or if there was no such flag + :rtype: dict """ meta = self.__flag_metadata.get(key) return None if meta is None else meta.get('reason') @@ -63,7 +73,9 @@ def to_values_map(self): default value, its value will be None. Do not use this method if you are passing data to the front end to "bootstrap" the JavaScript client. - Instead, use to_json_dict. + Instead, use :func:`to_json_dict()`. + + :rtype: dict """ return self.__flag_values @@ -71,6 +83,8 @@ def to_json_dict(self): """Returns a dictionary suitable for passing as JSON, in the format used by the LaunchDarkly JavaScript SDK. Use this method if you are passing data to the front end in order to "bootstrap" the JavaScript client. + + :rtype: dict """ ret = self.__flag_values.copy() ret['$flagsState'] = self.__flag_metadata @@ -79,6 +93,8 @@ def to_json_dict(self): def to_json_string(self): """Same as to_json_dict, but serializes the JSON structure into a string. + + :rtype: string """ return json.dumps(self.to_json_dict()) diff --git a/ldclient/integrations.py b/ldclient/integrations.py index fcc89abc..a1e9d2f8 100644 --- a/ldclient/integrations.py +++ b/ldclient/integrations.py @@ -1,3 +1,8 @@ +""" +This submodule contains factory/configuration methods for integrating the SDK with services +other than LaunchDarkly. +""" + from ldclient.feature_store import CacheConfig from ldclient.feature_store_helpers import CachingStoreWrapper from ldclient.impl.integrations.consul.consul_feature_store import _ConsulFeatureStoreCore @@ -19,25 +24,30 @@ def new_feature_store(host=None, prefix=None, consul_opts=None, caching=CacheConfig.default()): - """Creates a Consul-backed implementation of `:class:ldclient.feature_store.FeatureStore`. + """Creates a Consul-backed implementation of :class:`ldclient.interfaces.FeatureStore`. For more details about how and why you can use a persistent feature store, see the - SDK reference guide: https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store + `SDK reference guide `_. + + To use this method, you must first install the ``python-consul`` package. Then, put the object + returned by this method into the ``feature_store`` property of your client configuration + (:class:`ldclient.config.Config`). + :: - To use this method, you must first install the `python-consul` package. Then, put the object - returned by this method into the `feature_store` property of your client configuration - (:class:ldclient.config.Config). + from ldclient.integrations import Consul + store = Consul.new_feature_store() + config = Config(feature_store=store) - Note that `python-consul` is not available for Python 3.3 or 3.4, so this feature cannot be + Note that ``python-consul`` is not available for Python 3.3 or 3.4, so this feature cannot be used in those Python versions. 
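A hypothetical use of the `consul_opts` parameter from this docstring; the keys are passed through to python-consul's `Consul()` constructor, and the token value is a placeholder.

```
# consul_opts lets you set python-consul options beyond host and port.
from ldclient.integrations import Consul

store = Consul.new_feature_store(prefix="my-app",
                                 consul_opts={"token": "acl-token-placeholder"})
```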
- :param string host: Hostname of the Consul server (uses "localhost" if omitted) - :param int port: Port of the Consul server (uses 8500 if omitted) - :param string prefix: A namespace prefix to be prepended to all Consul keys - :param dict consul_opts: Optional parameters for configuring the Consul client, if you need - to set any of them besides host and port, as defined in the python-consul API; see - https://python-consul.readthedocs.io/en/latest/#consul - :param CacheConfig caching: Specifies whether local caching should be enabled and if so, - sets the cache properties; defaults to `CacheConfig.default()` + :param string host: hostname of the Consul server (uses ``localhost`` if omitted) + :param int port: port of the Consul server (uses 8500 if omitted) + :param string prefix: a namespace prefix to be prepended to all Consul keys + :param dict consul_opts: optional parameters for configuring the Consul client, if you need + to set any of them besides host and port, as defined in the + `python-consul API <https://python-consul.readthedocs.io/en/latest/#consul>`_ + :param CacheConfig caching: specifies whether local caching should be enabled and if so, + sets the cache properties; defaults to :func:`ldclient.feature_store.CacheConfig.default()` """ core = _ConsulFeatureStoreCore(host, port, prefix, consul_opts) return CachingStoreWrapper(core, caching) @@ -52,13 +62,18 @@ def new_feature_store(table_name, prefix=None, dynamodb_opts={}, caching=CacheConfig.default()): - """Creates a DynamoDB-backed implementation of `:class:ldclient.feature_store.FeatureStore`. + """Creates a DynamoDB-backed implementation of :class:`ldclient.interfaces.FeatureStore`. For more details about how and why you can use a persistent feature store, see the - SDK reference guide: https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store + `SDK reference guide <https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store>`_. - To use this method, you must first install the `boto3` package containing the AWS SDK gems. - Then, put the object returned by this method into the `feature_store` property of your - client configuration (:class:ldclient.config.Config). + To use this method, you must first install the ``boto3`` package (the AWS SDK for Python). + Then, put the object returned by this method into the ``feature_store`` property of your + client configuration (:class:`ldclient.config.Config`). + :: + + from ldclient.integrations import DynamoDB + store = DynamoDB.new_feature_store("my-table-name") + config = Config(feature_store=store) Note that the DynamoDB table must already exist; the LaunchDarkly SDK does not create the table automatically, because it has no way of knowing what additional properties (such as permissions @@ -67,14 +82,14 @@ def new_feature_store(table_name, By default, the DynamoDB client will try to get your AWS credentials and region name from environment variables and/or local configuration files, as described in the AWS SDK documentation. - You may also pass configuration settings in `dynamodb_opts`.
- - :param string table_name: The name of an existing DynamoDB table - :param string prefix: An optional namespace prefix to be prepended to all DynamoDB keys - :param dict dynamodb_opts: Optional parameters for configuring the DynamoDB client, as defined in - the boto3 API; see https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html#boto3.session.Session.client - :param CacheConfig caching: Specifies whether local caching should be enabled and if so, - sets the cache properties; defaults to `CacheConfig.default()` + You may also pass configuration settings in ``dynamodb_opts``. + + :param string table_name: the name of an existing DynamoDB table + :param string prefix: an optional namespace prefix to be prepended to all DynamoDB keys + :param dict dynamodb_opts: optional parameters for configuring the DynamoDB client, as defined in + the `boto3 API <https://boto3.amazonaws.com/v1/documentation/api/latest/reference/core/session.html#boto3.session.Session.client>`_ + :param CacheConfig caching: specifies whether local caching should be enabled and if so, + sets the cache properties; defaults to :func:`ldclient.feature_store.CacheConfig.default()` """ core = _DynamoDBFeatureStoreCore(table_name, prefix, dynamodb_opts) return CachingStoreWrapper(core, caching) @@ -92,21 +107,26 @@ def new_feature_store(url='redis://localhost:6379/0', prefix='launchdarkly', max_connections=16, caching=CacheConfig.default()): - """Creates a Redis-backed implementation of `:class:ldclient.feature_store.FeatureStore`. + """Creates a Redis-backed implementation of :class:`ldclient.interfaces.FeatureStore`. For more details about how and why you can use a persistent feature store, see the - SDK reference guide: https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store - - To use this method, you must first install the `redis` package. Then, put the object - returned by this method into the `feature_store` property of your client configuration - (:class:ldclient.config.Config). - - :param string url: The URL of the Redis host; defaults to `DEFAULT_URL` - :param string prefix: A namespace prefix to be prepended to all Redis keys; defaults to - `DEFAULT_PREFIX` - :param int max_connections: The maximum number of Redis connections to keep in the - connection pool; defaults to `DEFAULT_MAX_CONNECTIONS` - :param CacheConfig caching: Specifies whether local caching should be enabled and if so, - sets the cache properties; defaults to `CacheConfig.default()` + `SDK reference guide <https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store>`_. + + To use this method, you must first install the ``redis`` package. Then, put the object + returned by this method into the ``feature_store`` property of your client configuration + (:class:`ldclient.config.Config`).
+ :: + + from ldclient.integrations import Redis + store = Redis.new_feature_store() + config = Config(feature_store=store) + + :param string url: the URL of the Redis host; defaults to ``DEFAULT_URL`` + :param string prefix: a namespace prefix to be prepended to all Redis keys; defaults to + ``DEFAULT_PREFIX`` + :param int max_connections: the maximum number of Redis connections to keep in the + connection pool; defaults to ``DEFAULT_MAX_CONNECTIONS`` + :param CacheConfig caching: specifies whether local caching should be enabled and if so, + sets the cache properties; defaults to :func:`ldclient.feature_store.CacheConfig.default()` """ core = _RedisFeatureStoreCore(url, prefix, max_connections) wrapper = CachingStoreWrapper(core, caching) @@ -124,95 +144,40 @@ def new_data_source(paths, auto_update=False, poll_interval=1, force_polling=Fal used in a test environment, to operate using a predetermined feature flag state without an actual LaunchDarkly connection. - To use this component, call `new_data_source`, specifying the file path(s) of your data file(s) - in the `path` parameter; then put the value returned by this method into the `update_processor_class` - property of your LaunchDarkly client configuration (:class:ldclient.config.Config). + To use this component, call ``new_data_source``, specifying the file path(s) of your data file(s) + in the ``paths`` parameter; then put the value returned by this method into the ``update_processor_class`` + property of your LaunchDarkly client configuration (:class:`ldclient.config.Config`). :: - data_source = LaunchDarkly::Integrations::Files.new_data_source(paths=[ myFilePath ]) + from ldclient.integrations import Files + data_source = Files.new_data_source(paths=[ myFilePath ]) config = Config(update_processor_class=data_source) This will cause the client not to connect to LaunchDarkly to get feature flags. The client may still make network connections to send analytics events, unless you have disabled - this with Config.send_events or Config.offline. - - Flag data files can be either JSON or YAML (in order to use YAML, you must install the 'pyyaml' - package). They contain an object with three possible properties: - - * "flags": Feature flag definitions. - * "flagValues": Simplified feature flags that contain only a value. - * "segments": User segment definitions. - - The format of the data in "flags" and "segments" is defined by the LaunchDarkly application - and is subject to change. Rather than trying to construct these objects yourself, it is simpler - to request existing flags directly from the LaunchDarkly server in JSON format, and use this - output as the starting point for your file. In Linux you would do this: - :: - - curl -H "Authorization: {your sdk key}" https://app.launchdarkly.com/sdk/latest-all - - The output will look something like this (but with many more properties): - :: - - { - "flags": { - "flag-key-1": { - "key": "flag-key-1", - "on": true, - "variations": [ "a", "b" ] - } - }, - "segments": { - "segment-key-1": { - "key": "segment-key-1", - "includes": [ "user-key-1" ] - } - } - } - - Data in this format allows the SDK to exactly duplicate all the kinds of flag behavior supported - by LaunchDarkly. However, in many cases you will not need this complexity, but will just want to - set specific flag keys to specific values. 
For that, you can use a much simpler format: - :: - - { - "flagValues": { - "my-string-flag-key": "value-1", - "my-boolean-flag-key": true, - "my-integer-flag-key": 3 - } - } - - Or, in YAML: - :: - - flagValues: - my-string-flag-key: "value-1" - my-boolean-flag-key: true - my-integer-flag-key: 1 + this in your configuration with ``send_events`` or ``offline``. - It is also possible to specify both "flags" and "flagValues", if you want some flags - to have simple values and others to have complex behavior. However, it is an error to use the - same flag key or segment key more than once, either in a single file or across multiple files. + The format of the data files is described in the SDK Reference Guide on + `Reading flags from a file <https://docs.launchdarkly.com/v2.0/docs/reading-flags-from-a-file>`_. + Note that in order to use YAML, you will need to install the ``pyyaml`` package. If the data source encounters any error in any file-- malformed content, a missing file, or a duplicate key-- it will not load flags from any of the files. - :param array paths: The paths of the source files for loading flag data. These may be absolute paths - or relative to the current working directory. Files will be parsed as JSON unless the 'pyyaml' + :param array paths: the paths of the source files for loading flag data. These may be absolute paths + or relative to the current working directory. Files will be parsed as JSON unless the ``pyyaml`` package is installed, in which case YAML is also allowed. :param bool auto_update: (default: false) True if the data source should watch for changes to the source file(s) and reload flags whenever there is a change. The default implementation of this feature is based on - polling the filesystem, which may not perform well; if you install the 'watchdog' package (not - included by default, to avoid adding unwanted dependencies to the SDK), its native file watching - mechanism will be used instead. Note that auto-updating will only work if all of the files you - specified have valid directory paths at startup time. - :param float poll_interval: (default: 1) The minimum interval, in seconds, between checks for file - modifications-- used only if `auto_update` is true, and if the native file-watching mechanism from - `watchdog` is not being used. + polling the filesystem, which may not perform well; if you install the ``watchdog`` package, its + native file watching mechanism will be used instead. Note that auto-updating will only work if all + of the files you specified have valid directory paths at startup time. + :param float poll_interval: (default: 1) the minimum interval, in seconds, between checks for file + modifications-- used only if ``auto_update`` is true, and if the native file-watching mechanism from + ``watchdog`` is not being used. :param bool force_polling: (default: false) True if the data source should implement auto-update via polling the filesystem even if a native mechanism is available. This is mainly for SDK testing. - :return: an object (actually a lambda) to be stored in the `update_processor_class` configuration property + :return: an object (actually a lambda) to be stored in the ``update_processor_class`` configuration property """ return lambda config, store, ready : _FileDataSource(store, ready, paths, auto_update, poll_interval, force_polling) diff --git a/ldclient/interfaces.py b/ldclient/interfaces.py index 9556bdfc..48c517b8 100644 --- a/ldclient/interfaces.py +++ b/ldclient/interfaces.py @@ -1,16 +1,22 @@ +""" +This submodule contains interfaces for various components of the SDK.
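Returning briefly to the file data source above, a hedged configuration sketch (editorial, not part of the patch; the file path and SDK key are placeholders). ::

    from ldclient.config import Config
    from ldclient.integrations import Files

    # reload flags when ./flags.json changes; the 1-second minimum interval
    # only matters if the filesystem-polling fallback is in use
    data_source = Files.new_data_source(paths=['./flags.json'],
                                        auto_update=True,
                                        poll_interval=1)
    config = Config(sdk_key='dummy-key',
                    update_processor_class=data_source,
                    send_events=False)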
+ +They may be useful in writing new implementations of these components, or for testing. +""" + from abc import ABCMeta, abstractmethod, abstractproperty class FeatureStore(object): """ - A versioned store for feature flags and related objects received from LaunchDarkly. + Interface for a versioned store for feature flags and related objects received from LaunchDarkly. Implementations should permit concurrent access and updates. - An "object", for `FeatureStore`, is simply a dict of arbitrary data which must have at least - three properties: "key" (its unique key), "version" (the version number provided by - LaunchDarkly), and "deleted" (True if this is a placeholder for a deleted object). + An "object", for ``FeatureStore``, is simply a dict of arbitrary data which must have at least + three properties: ``key`` (its unique key), ``version`` (the version number provided by + LaunchDarkly), and ``deleted`` (True if this is a placeholder for a deleted object). - Delete and upsert requests are versioned-- if the version number in the request is less than + Delete and upsert requests are versioned: if the version number in the request is less than the currently stored version of the object, the request should be ignored. These semantics support the primary use case for the store, which synchronizes a collection @@ -22,7 +28,7 @@ class FeatureStore(object): def get(self, kind, key, callback=lambda x: x): """ Retrieves the object to which the specified key is mapped, or None if the key is not found - or the associated object has a "deleted" property of True. The retrieved object, if any (a + or the associated object has a ``deleted`` property of True. The retrieved object, if any (a dict) can be transformed by the specified callback. :param kind: The kind of object to get @@ -97,11 +103,11 @@ def initialized(self): class FeatureStoreCore(object): """ - `FeatureStoreCore` is an interface for a simplified subset of the functionality of :class:`FeatureStore`, - to be used in conjunction with :class:`feature_store_helpers.CachingStoreWrapper`. This allows developers - developers of custom `FeatureStore` implementations to avoid repeating logic that would + Interface for a simplified subset of the functionality of :class:`FeatureStore`, to be used + in conjunction with :class:`ldclient.feature_store_helpers.CachingStoreWrapper`. This allows + developers of custom ``FeatureStore`` implementations to avoid repeating logic that would commonly be needed in any such implementation, such as caching. Instead, they can implement - only `FeatureStoreCore` and then create a `CachingStoreWrapper`. + only ``FeatureStoreCore`` and then create a ``CachingStoreWrapper``. """ __metaclass__ = ABCMeta @@ -174,10 +180,8 @@ def initialized_internal(self): """ +# Internal use only. Common methods for components that perform a task in the background. class BackgroundOperation(object): - """ - Performs a task in the background - """ # noinspection PyMethodMayBeStatic def start(self): @@ -203,20 +207,24 @@ def is_alive(self): class UpdateProcessor(BackgroundOperation): """ - Responsible for retrieving Feature Flag updates from LaunchDarkly and saving them to the feature store + Interface for the component that obtains feature flag data in some way and passes it to a + :class:`FeatureStore`. The built-in implementations of this are the client's standard streaming + or polling behavior. For testing purposes, there is also :func:`ldclient.integrations.Files.new_data_source()`. 
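An editorial sketch, not part of the patch: a minimal in-memory ``FeatureStore`` honoring the versioning rule described above. It assumes the abstract methods follow the signatures in this module (``get``, ``all``, ``init``, ``upsert``, ``delete``, and the ``initialized`` property). ::

    from ldclient.interfaces import FeatureStore

    class MinimalMemoryStore(FeatureStore):
        def __init__(self):
            self._items = {}   # kind.namespace -> {key: object dict}
            self._inited = False

        def get(self, kind, key, callback=lambda x: x):
            item = self._items.get(kind.namespace, {}).get(key)
            # deleted placeholders are reported as missing
            return callback(None if item is None or item.get('deleted') else item)

        def all(self, kind, callback=lambda x: x):
            items = self._items.get(kind.namespace, {})
            return callback({k: v for k, v in items.items() if not v.get('deleted')})

        def init(self, all_data):
            self._items = {kind.namespace: dict(items) for kind, items in all_data.items()}
            self._inited = True

        def upsert(self, kind, item):
            items = self._items.setdefault(kind.namespace, {})
            old = items.get(item['key'])
            if old is None or old['version'] < item['version']:
                items[item['key']] = item   # stale versions are ignored

        def delete(self, kind, key, version):
            # a delete is just an upsert of a versioned "deleted" placeholder
            self.upsert(kind, {'key': key, 'version': version, 'deleted': True})

        @property
        def initialized(self):
            return self._inited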
""" __metaclass__ = ABCMeta def initialized(self): """ Returns whether the update processor has received feature flags and has initialized its feature store. + :rtype: bool """ class EventProcessor(object): """ - Buffers analytics events and sends them to LaunchDarkly + Interface for the component that buffers analytics events and sends them to LaunchDarkly. + The default implementation can be replaced for testing purposes. """ __metaclass__ = ABCMeta @@ -231,7 +239,7 @@ def flush(self): """ Specifies that any buffered events should be sent as soon as possible, rather than waiting for the next flush interval. This method is asynchronous, so events still may not be sent - until a later time. However, calling stop() will synchronously deliver any events that were + until a later time. However, calling ``stop()`` will synchronously deliver any events that were not yet delivered prior to shutting down. """ @@ -244,7 +252,8 @@ def stop(self): class FeatureRequester(object): """ - Requests features. + Interface for the component that acquires feature flag data in polling mode. The default + implementation can be replaced for testing purposes. """ __metaclass__ = ABCMeta @@ -254,7 +263,7 @@ def get_all(self): """ pass - def get_one(self, key): + def get_one(self, kind, key): """ Gets one Feature flag :return: diff --git a/ldclient/lru_cache.py b/ldclient/lru_cache.py index 53cbf5d2..9833287b 100644 --- a/ldclient/lru_cache.py +++ b/ldclient/lru_cache.py @@ -1,13 +1,13 @@ -''' -A dictionary-based cache that removes the oldest entries when its limit is exceeded. -Values are only refreshed by writing, not by reading. Not thread-safe. -''' +""" +Internal helper class for caching. +""" +# currently excluded from documentation - see ldclient.rst from collections import OrderedDict # Backport of Python 3.2 move_to_end method which doesn't exist in 2.7 -class OrderedDictWithReordering(OrderedDict): +class _OrderedDictWithReordering(OrderedDict): if not hasattr(OrderedDict, 'move_to_end'): # backport of Python 3.2 logic def move_to_end(self, key, last=True): @@ -28,9 +28,12 @@ def move_to_end(self, key, last=True): class SimpleLRUCache(object): + """A dictionary-based cache that removes the oldest entries when its limit is exceeded. + Values are only refreshed by writing, not by reading. Not thread-safe. + """ def __init__(self, capacity): self.capacity = capacity - self.cache = OrderedDictWithReordering() + self.cache = _OrderedDictWithReordering() def get(self, key): return self.cache.get(key) diff --git a/ldclient/memoized_value.py b/ldclient/memoized_value.py index b2c38fea..08fb2d51 100644 --- a/ldclient/memoized_value.py +++ b/ldclient/memoized_value.py @@ -1,12 +1,17 @@ -''' -Simple implementation of a thread-safe memoized value whose generator function will never be -run more than once, and whose value can be overridden by explicit assignment. -''' +""" +Internal helper class for caching. No longer used. +""" +# currently excluded from documentation - see ldclient.rst from threading import RLock class MemoizedValue(object): + """Simple implementation of a thread-safe memoized value whose generator function will never be + run more than once, and whose value can be overridden by explicit assignment. + .. deprecated:: 6.7.0 + No longer used. Retained here only in case third parties were using it for another purpose. 
+ """ def __init__(self, generator): self.generator = generator self.inited = False diff --git a/ldclient/operators.py b/ldclient/operators.py index 88a76cd1..208edcbd 100644 --- a/ldclient/operators.py +++ b/ldclient/operators.py @@ -1,3 +1,8 @@ +""" +Implementation details of feature flag evaluation. +""" +# currently excluded from documentation - see ldclient.rst + import logging import re import semver diff --git a/ldclient/polling.py b/ldclient/polling.py index 19ed0a7d..6f1be549 100644 --- a/ldclient/polling.py +++ b/ldclient/polling.py @@ -1,3 +1,8 @@ +""" +Default implementation of the polling component. +""" +# currently excluded from documentation - see ldclient.rst + from threading import Thread from ldclient.interfaces import UpdateProcessor diff --git a/ldclient/redis_feature_store.py b/ldclient/redis_feature_store.py index ff93c402..1e49d9ee 100644 --- a/ldclient/redis_feature_store.py +++ b/ldclient/redis_feature_store.py @@ -11,10 +11,11 @@ # will migrate away from exposing these concrete classes and use only the factory methods. class RedisFeatureStore(FeatureStore): - """A Redis-backed implementation of :class:`ldclient.feature_store.FeatureStore`. + """A Redis-backed implementation of :class:`ldclient.interfaces.FeatureStore`. - This module and this implementation class are deprecated and may be changed or removed in the future. - Please use :func:`ldclient.integrations.Redis.new_feature_store()`. + .. deprecated:: 6.7.0 + This module and this implementation class are deprecated and may be changed or removed in the future. + Please use :func:`ldclient.integrations.Redis.new_feature_store()`. """ def __init__(self, url='redis://localhost:6379/0', diff --git a/ldclient/repeating_timer.py b/ldclient/repeating_timer.py index 956cfbcd..49fd043c 100644 --- a/ldclient/repeating_timer.py +++ b/ldclient/repeating_timer.py @@ -1,3 +1,8 @@ +""" +Internal helper class for repeating tasks. +""" +# currently excluded from documentation - see ldclient.rst + from threading import Event, Thread class RepeatingTimer(object): diff --git a/ldclient/rwlock.py b/ldclient/rwlock.py index 8416a35c..feaa510e 100644 --- a/ldclient/rwlock.py +++ b/ldclient/rwlock.py @@ -1,3 +1,8 @@ +""" +Internal helper class for locking. +""" +# currently excluded from documentation - see ldclient.rst + import threading diff --git a/ldclient/sse_client.py b/ldclient/sse_client.py index 5b41413b..a1e0f6fb 100644 --- a/ldclient/sse_client.py +++ b/ldclient/sse_client.py @@ -1,3 +1,10 @@ +""" +Server-Sent Events implementation for streaming. + +Based on: https://bitbucket.org/btubbs/sseclient/src/a47a380a3d7182a205c0f1d5eb470013ce796b4d/sseclient.py?at=default&fileviewer=file-view-default +""" +# currently excluded from documentation - see ldclient.rst + import re import time import warnings @@ -9,8 +16,6 @@ from ldclient.util import create_http_pool_manager from ldclient.util import throw_if_unsuccessful_response -# Inspired by: https://bitbucket.org/btubbs/sseclient/src/a47a380a3d7182a205c0f1d5eb470013ce796b4d/sseclient.py?at=default&fileviewer=file-view-default - # Technically, we should support streams that mix line endings. This regex, # however, assumes that a system will provide consistent line endings. end_of_field = re.compile(r'\r\n\r\n|\r\r|\n\n') diff --git a/ldclient/streaming.py b/ldclient/streaming.py index 20599eb1..eba4d5fc 100644 --- a/ldclient/streaming.py +++ b/ldclient/streaming.py @@ -1,3 +1,8 @@ +""" +Default implementation of the streaming component. 
+""" +# currently excluded from documentation - see ldclient.rst + from collections import namedtuple import json diff --git a/ldclient/user_filter.py b/ldclient/user_filter.py index d48ab23f..6379287e 100644 --- a/ldclient/user_filter.py +++ b/ldclient/user_filter.py @@ -1,3 +1,8 @@ +""" +Internal helper class for filtering out private attributes. +""" +# currently excluded from documentation - see ldclient.rst + import jsonpickle import six diff --git a/ldclient/util.py b/ldclient/util.py index fbb2f11d..02c84ea0 100644 --- a/ldclient/util.py +++ b/ldclient/util.py @@ -1,3 +1,8 @@ +""" +General internal helper functions. +""" +# currently excluded from documentation - see ldclient.rst + import certifi import logging import sys diff --git a/ldclient/versioned_data_kind.py b/ldclient/versioned_data_kind.py index 04acce43..37504394 100644 --- a/ldclient/versioned_data_kind.py +++ b/ldclient/versioned_data_kind.py @@ -1,17 +1,24 @@ -from collections import namedtuple - """ -These objects denote the types of data that can be stored in the feature store and -referenced in the API. If we add another storable data type in the future, as long as it -follows the same pattern (having "key", "version", and "deleted" properties), we only need -to add a corresponding constant here and the existing store should be able to handle it. +This submodule is used only by the internals of the feature flag storage mechanism. + +If you are writing your own implementation of :class:`ldclient.integrations.FeatureStore`, the +:class:`VersionedDataKind` tuple type will be passed to the ``kind`` parameter of the feature +store methods; its ``namespace`` property tells the feature store which collection of objects is +being referenced ("features", "segments", etc.). The intention is for the feature store to treat +storable objects as completely generic JSON dictionaries, rather than having any special logic +for features or segments. """ +from collections import namedtuple + # Note that VersionedDataKind without the extra attributes is no longer used in the SDK, # but it's preserved here for backward compatibility just in case someone else used it VersionedDataKind = namedtuple('VersionedDataKind', ['namespace', 'request_api_path', 'stream_api_path']) +# Note, feature store implementors really don't need to know about this class so we could just +# not document it at all, but apparently namedtuple() creates its own docstrings so it's going +# to show up in any case. 
VersionedDataKindWithOrdering = namedtuple('VersionedDataKindWithOrdering', ['namespace', 'request_api_path', 'stream_api_path', 'priority', 'get_dependency_keys']) From 71534aa6a104aa9fc94d6631612a0bac26834b32 Mon Sep 17 00:00:00 2001 From: Harpo roeder Date: Tue, 5 Feb 2019 12:44:45 -0800 Subject: [PATCH 069/190] add consul to test-requirements, remove specific reference to install redis, consul, dynamo deps in azure ci --- azure-pipelines.yml | 3 --- test-requirements.txt | 1 + 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index e4f8f7af..c0d39fdb 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -46,9 +46,6 @@ jobs: script: | python --version pip install -r test-requirements.txt - pip install -r redis-requirements.txt - pip install -r consul-requirements.txt - pip install -r dynamodb-requirements.txt python setup.py install mkdir test-reports pytest -s --junitxml=test-reports/junit.xml testing; diff --git a/test-requirements.txt b/test-requirements.txt index 88cbbc2e..714c8bd2 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -8,3 +8,4 @@ pytest-cov>=2.4.0 codeclimate-test-reporter>=0.2.1 pyyaml>=3.0 watchdog>=0.9 +python-consul>=1.0.1 From 6bbd65f1dc540c4c8eca25079d160495b97258b5 Mon Sep 17 00:00:00 2001 From: Harpo roeder Date: Tue, 5 Feb 2019 12:53:36 -0800 Subject: [PATCH 070/190] Revert "add consul to test-requirements, remove specific reference to install redis, consul, dynamo deps in azure ci" This reverts commit 71534aa6a104aa9fc94d6631612a0bac26834b32. --- azure-pipelines.yml | 3 +++ test-requirements.txt | 1 - 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index c0d39fdb..e4f8f7af 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -46,6 +46,9 @@ jobs: script: | python --version pip install -r test-requirements.txt + pip install -r redis-requirements.txt + pip install -r consul-requirements.txt + pip install -r dynamodb-requirements.txt python setup.py install mkdir test-reports pytest -s --junitxml=test-reports/junit.xml testing; diff --git a/test-requirements.txt b/test-requirements.txt index 714c8bd2..88cbbc2e 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -8,4 +8,3 @@ pytest-cov>=2.4.0 codeclimate-test-reporter>=0.2.1 pyyaml>=3.0 watchdog>=0.9 -python-consul>=1.0.1 From 0ec55a76efedd72ef59f22442308747d9bac1209 Mon Sep 17 00:00:00 2001 From: Harpo roeder Date: Tue, 5 Feb 2019 12:55:29 -0800 Subject: [PATCH 071/190] remove redis and dynamo explicit dep reference --- azure-pipelines.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index e4f8f7af..b7f19ff3 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -46,9 +46,7 @@ jobs: script: | python --version pip install -r test-requirements.txt - pip install -r redis-requirements.txt pip install -r consul-requirements.txt - pip install -r dynamodb-requirements.txt python setup.py install mkdir test-reports pytest -s --junitxml=test-reports/junit.xml testing; From 49c5993af42fa554d99fd207129f62a456c3c488 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 5 Feb 2019 13:45:14 -0800 Subject: [PATCH 072/190] add requirements.txt --- docs/requirements.txt | 1 + 1 file changed, 1 insertion(+) create mode 100644 docs/requirements.txt diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 00000000..ef90363c --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1 @@ +sphinx<2.0 From 
5228df7abbf6d0aa5d608c354d9ccf530f3aafe7 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 5 Feb 2019 13:49:47 -0800 Subject: [PATCH 073/190] add config file --- .readthedocs.yml | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 .readthedocs.yml diff --git a/.readthedocs.yml b/.readthedocs.yml new file mode 100644 index 00000000..56781a23 --- /dev/null +++ b/.readthedocs.yml @@ -0,0 +1,12 @@ +version: 2 + +python: + version: 3.7 + install: + - requirements: docs/requirements.txt + - requirements: requirements.txt + +sphinx: + builder: html + configuration: docs/conf.py + fail_on_warning: true From f4e5c8604883c09f2cc9b2f0e191139272c3e6a5 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 5 Feb 2019 15:10:44 -0800 Subject: [PATCH 074/190] break up API docs into logical groups with a better home page --- docs/README.md | 29 ++++++++++++ docs/api-extending.rst | 17 +++++++ docs/api-integrations.rst | 10 +++++ docs/api-main.rst | 40 +++++++++++++++++ docs/index.rst | 22 +++++----- docs/ldclient.rst | 83 ----------------------------------- ldclient/__init__.py | 1 + ldclient/event_processor.py | 2 +- ldclient/event_summarizer.py | 2 +- ldclient/feature_requester.py | 2 +- 10 files changed, 111 insertions(+), 97 deletions(-) create mode 100644 docs/README.md create mode 100644 docs/api-extending.rst create mode 100644 docs/api-integrations.rst create mode 100644 docs/api-main.rst delete mode 100644 docs/ldclient.rst diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 00000000..1eb6a999 --- /dev/null +++ b/docs/README.md @@ -0,0 +1,29 @@ +# How the Python SDK documentation works + +The generated API documentation is built with [Sphinx](http://www.sphinx-doc.org/en/master/), and is hosted on [Read the Docs](https://readthedocs.org/). + +It uses the following: + +* Docstrings within the code. Docstrings can use any of the markup supported by Sphinx. +* The `.rst` files in the `docs` directory. These provide the overall page structure. +* The `conf.py` file containing Sphinx settings. + +## What to document + +Every public class, method, and module should have a docstring. Classes and methods with no docstring will not be included in the API docs. + +"Public" here means things that we want third-party developers to use. The SDK also contains many modules and classes that are not actually private (i.e. they aren't prefixed with `_`), but are for internal use only and aren't supported for any other use (we would like to reduce the amount of these in future). + +To cause a class or method in an existing module to be added to the docs, all that's necessary is to give it a docstring. + +To add a new module to the docs, give it a docstring and then add a link to it in the appropriate `api-*.rst` file, in the same format as the existing links. + +## Undocumented things + +Modules that contain only implementation details are omitted from the docs by simply not including links to them in the `.rst` files. + +Internal classes in a documented module will be omitted from the docs if they do not have any docstrings, unless they inherit from another class that has docstrings. In the latter case, the way to omit them from the docs is to edit the `.rst` file that contains the link to that module, and add a `:members:` directive under the module that specifically lists all the classes that _should_ be shown. + +## Testing + +In the `docs` directory, run `make html` to build all the docs. Then view `docs/build/html/index.html`. 
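As a closing editorial illustration of the docstring convention described earlier in this README (names are invented, not from the SDK): a public class gains a docs entry simply by having a docstring, while an underscore-prefixed helper without one stays out.

    class PublicThing(object):
        """A supported public class; this docstring makes it appear in the API docs.

        :param string name: a display name
        """
        def __init__(self, name):
            self.name = name

    class _InternalHelper(object):
        # no docstring, underscore prefix: omitted from the generated docs
        pass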
diff --git a/docs/api-extending.rst b/docs/api-extending.rst new file mode 100644 index 00000000..8c951904 --- /dev/null +++ b/docs/api-extending.rst @@ -0,0 +1,17 @@ +Extending the SDK +================= + +ldclient.interfaces module +-------------------------- + +.. automodule:: ldclient.interfaces + :members: + :special-members: __init__ + :show-inheritance: + +ldclient.versioned_data_kind module +----------------------------------- + +.. automodule:: ldclient.versioned_data_kind + :members: + :show-inheritance: diff --git a/docs/api-integrations.rst b/docs/api-integrations.rst new file mode 100644 index 00000000..8d8146ff --- /dev/null +++ b/docs/api-integrations.rst @@ -0,0 +1,10 @@ +Integrating with other services +=============================== + +ldclient.integrations module +---------------------------- + +.. automodule:: ldclient.integrations + :members: + :special-members: __init__ + :show-inheritance: diff --git a/docs/api-main.rst b/docs/api-main.rst new file mode 100644 index 00000000..56417ea5 --- /dev/null +++ b/docs/api-main.rst @@ -0,0 +1,40 @@ +Core API +======== + +ldclient module +--------------- + +.. automodule:: ldclient + :members: get,set_config,set_sdk_key + :show-inheritance: + +ldclient.client module +---------------------- + +.. automodule:: ldclient.client + :members: LDClient + :special-members: __init__ + :show-inheritance: + +ldclient.config module +---------------------- + +.. automodule:: ldclient.config + :members: + :special-members: __init__ + :show-inheritance: + +ldclient.flag module +-------------------- + +.. automodule:: ldclient.flag + :members: EvaluationDetail + :special-members: __init__ + :show-inheritance: + +ldclient.flags_state module +--------------------------- + +.. automodule:: ldclient.flags_state + :members: + :show-inheritance: diff --git a/docs/index.rst b/docs/index.rst index 735da978..7a9d2c73 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -3,19 +3,19 @@ You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. -Welcome to ldclient-py's documentation! -======================================= +LaunchDarkly Python SDK +======================= -.. toctree:: - :maxdepth: 2 - :caption: Contents: +This is the API reference for the `LaunchDarkly `_ SDK for Python. -.. automodule:: ldclient +The latest version of the SDK can be found on `PyPI `_, and the source code is on `GitHub `_. +For more information, see LaunchDarkly's `Quickstart `_ and `SDK Reference Guide `_. -Indices and tables -================== +.. toctree:: + :maxdepth: 2 + :caption: Contents: -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` + api-main + api-integrations + api-extending diff --git a/docs/ldclient.rst b/docs/ldclient.rst deleted file mode 100644 index 4a212c16..00000000 --- a/docs/ldclient.rst +++ /dev/null @@ -1,83 +0,0 @@ -ldclient package -================ - -Module contents ---------------- - -.. automodule:: ldclient - :members: - :undoc-members: - :show-inheritance: - -Submodules ----------- - -ldclient.client module ----------------------- - -.. automodule:: ldclient.client - :members: LDClient - :special-members: __init__ - :show-inheritance: - -ldclient.config module ----------------------- - -.. automodule:: ldclient.config - :members: - :special-members: __init__ - :show-inheritance: - -ldclient.feature\_store module ------------------------------- - -.. 
automodule:: ldclient.feature_store - :members: - :special-members: __init__ - :show-inheritance: - -ldclient.feature\_store\_helpers module ---------------------------------------- - -.. automodule:: ldclient.feature_store_helpers - :members: - :special-members: __init__ - :show-inheritance: - -ldclient.flag module --------------------- - -.. automodule:: ldclient.flag - :members: EvaluationDetail - :special-members: __init__ - :show-inheritance: - -ldclient.flags\_state module ----------------------------- - -.. automodule:: ldclient.flags_state - :members: - :show-inheritance: - -ldclient.integrations module ----------------------------- - -.. automodule:: ldclient.integrations - :members: - :special-members: __init__ - :show-inheritance: - -ldclient.interfaces module --------------------------- - -.. automodule:: ldclient.interfaces - :members: - :special-members: __init__ - :show-inheritance: - -ldclient.versioned\_data\_kind module -------------------------------------- - -.. automodule:: ldclient.versioned_data_kind - :members: - :show-inheritance: diff --git a/ldclient/__init__.py b/ldclient/__init__.py index 5be96db9..5dfb011f 100644 --- a/ldclient/__init__.py +++ b/ldclient/__init__.py @@ -122,6 +122,7 @@ def get(): __lock.unlock() +# currently hidden from documentation - see api-main.rst class NullHandler(logging.Handler): """A :class:`logging.Handler` implementation that does nothing. diff --git a/ldclient/event_processor.py b/ldclient/event_processor.py index 03134b64..3267f451 100644 --- a/ldclient/event_processor.py +++ b/ldclient/event_processor.py @@ -1,7 +1,7 @@ """ Implementation details of the analytics event delivery component. """ -# currently excluded from documentation - see ldclient.rst +# currently excluded from documentation from collections import namedtuple from email.utils import parsedate diff --git a/ldclient/event_summarizer.py b/ldclient/event_summarizer.py index 2d084ddc..ec4003b9 100644 --- a/ldclient/event_summarizer.py +++ b/ldclient/event_summarizer.py @@ -1,7 +1,7 @@ """ Implementation details of the analytics event delivery component. """ -# currently excluded from documentation - see ldclient.rst +# currently excluded from documentation from collections import namedtuple diff --git a/ldclient/feature_requester.py b/ldclient/feature_requester.py index 4414fb7a..11892e58 100644 --- a/ldclient/feature_requester.py +++ b/ldclient/feature_requester.py @@ -1,7 +1,7 @@ """ Default implementation of feature flag polling requests. 
""" -# currently excluded from documentation - see ldclient.rst +# currently excluded from documentation from collections import namedtuple import json From 9445a6ebbec338c0ae6570a486f75bb9de374b98 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 5 Feb 2019 15:17:20 -0800 Subject: [PATCH 075/190] misc cleanup --- docs/api-extending.rst | 8 ++++++++ ldclient/event_processor.py | 2 +- ldclient/event_summarizer.py | 2 +- ldclient/feature_requester.py | 2 +- ldclient/file_data_source.py | 2 +- ldclient/fixed_thread_pool.py | 2 +- ldclient/lru_cache.py | 2 +- ldclient/memoized_value.py | 2 +- ldclient/operators.py | 2 +- ldclient/polling.py | 2 +- ldclient/repeating_timer.py | 2 +- ldclient/rwlock.py | 2 +- ldclient/sse_client.py | 2 +- ldclient/streaming.py | 2 +- ldclient/user_filter.py | 2 +- ldclient/util.py | 2 +- 16 files changed, 23 insertions(+), 15 deletions(-) diff --git a/docs/api-extending.rst b/docs/api-extending.rst index 8c951904..4f668ce0 100644 --- a/docs/api-extending.rst +++ b/docs/api-extending.rst @@ -9,6 +9,14 @@ ldclient.interfaces module :special-members: __init__ :show-inheritance: +ldclient.feature_store_helpers module +------------------------------------- + +.. automodule:: ldclient.feature_store_helpers + :members: + :special-members: __init__ + :show-inheritance: + ldclient.versioned_data_kind module ----------------------------------- diff --git a/ldclient/event_processor.py b/ldclient/event_processor.py index 3267f451..f7a9178f 100644 --- a/ldclient/event_processor.py +++ b/ldclient/event_processor.py @@ -1,7 +1,7 @@ """ Implementation details of the analytics event delivery component. """ -# currently excluded from documentation +# currently excluded from documentation - see docs/README.md from collections import namedtuple from email.utils import parsedate diff --git a/ldclient/event_summarizer.py b/ldclient/event_summarizer.py index ec4003b9..c0aa5aeb 100644 --- a/ldclient/event_summarizer.py +++ b/ldclient/event_summarizer.py @@ -1,7 +1,7 @@ """ Implementation details of the analytics event delivery component. """ -# currently excluded from documentation +# currently excluded from documentation - see docs/README.md from collections import namedtuple diff --git a/ldclient/feature_requester.py b/ldclient/feature_requester.py index 11892e58..51aee6a0 100644 --- a/ldclient/feature_requester.py +++ b/ldclient/feature_requester.py @@ -1,7 +1,7 @@ """ Default implementation of feature flag polling requests. """ -# currently excluded from documentation +# currently excluded from documentation - see docs/README.md from collections import namedtuple import json diff --git a/ldclient/file_data_source.py b/ldclient/file_data_source.py index 527acec5..56da8de8 100644 --- a/ldclient/file_data_source.py +++ b/ldclient/file_data_source.py @@ -1,7 +1,7 @@ """ Deprecated entry point for a component that has been moved. """ -# currently excluded from documentation - see ldclient.rst +# currently excluded from documentation - see docs/README.md from ldclient.impl.integrations.files.file_data_source import _FileDataSource from ldclient.interfaces import UpdateProcessor diff --git a/ldclient/fixed_thread_pool.py b/ldclient/fixed_thread_pool.py index 17ded510..27fca13d 100644 --- a/ldclient/fixed_thread_pool.py +++ b/ldclient/fixed_thread_pool.py @@ -1,7 +1,7 @@ """ Internal helper class for thread management. 
""" -# currently excluded from documentation - see ldclient.rst +# currently excluded from documentation - see docs/README.md from threading import Event, Lock, Thread diff --git a/ldclient/lru_cache.py b/ldclient/lru_cache.py index 9833287b..f8f18e37 100644 --- a/ldclient/lru_cache.py +++ b/ldclient/lru_cache.py @@ -1,7 +1,7 @@ """ Internal helper class for caching. """ -# currently excluded from documentation - see ldclient.rst +# currently excluded from documentation - see docs/README.md from collections import OrderedDict diff --git a/ldclient/memoized_value.py b/ldclient/memoized_value.py index 08fb2d51..7abc944f 100644 --- a/ldclient/memoized_value.py +++ b/ldclient/memoized_value.py @@ -1,7 +1,7 @@ """ Internal helper class for caching. No longer used. """ -# currently excluded from documentation - see ldclient.rst +# currently excluded from documentation - see docs/README.md from threading import RLock diff --git a/ldclient/operators.py b/ldclient/operators.py index 208edcbd..253e8a8b 100644 --- a/ldclient/operators.py +++ b/ldclient/operators.py @@ -1,7 +1,7 @@ """ Implementation details of feature flag evaluation. """ -# currently excluded from documentation - see ldclient.rst +# currently excluded from documentation - see docs/README.md import logging import re diff --git a/ldclient/polling.py b/ldclient/polling.py index 6f1be549..59803a30 100644 --- a/ldclient/polling.py +++ b/ldclient/polling.py @@ -1,7 +1,7 @@ """ Default implementation of the polling component. """ -# currently excluded from documentation - see ldclient.rst +# currently excluded from documentation - see docs/README.md from threading import Thread diff --git a/ldclient/repeating_timer.py b/ldclient/repeating_timer.py index 49fd043c..eb8aa771 100644 --- a/ldclient/repeating_timer.py +++ b/ldclient/repeating_timer.py @@ -1,7 +1,7 @@ """ Internal helper class for repeating tasks. """ -# currently excluded from documentation - see ldclient.rst +# currently excluded from documentation - see docs/README.md from threading import Event, Thread diff --git a/ldclient/rwlock.py b/ldclient/rwlock.py index feaa510e..251d5eb4 100644 --- a/ldclient/rwlock.py +++ b/ldclient/rwlock.py @@ -1,7 +1,7 @@ """ Internal helper class for locking. """ -# currently excluded from documentation - see ldclient.rst +# currently excluded from documentation - see docs/README.md import threading diff --git a/ldclient/sse_client.py b/ldclient/sse_client.py index a1e0f6fb..49d853c7 100644 --- a/ldclient/sse_client.py +++ b/ldclient/sse_client.py @@ -3,7 +3,7 @@ Based on: https://bitbucket.org/btubbs/sseclient/src/a47a380a3d7182a205c0f1d5eb470013ce796b4d/sseclient.py?at=default&fileviewer=file-view-default """ -# currently excluded from documentation - see ldclient.rst +# currently excluded from documentation - see docs/README.md import re import time diff --git a/ldclient/streaming.py b/ldclient/streaming.py index eba4d5fc..43e815a4 100644 --- a/ldclient/streaming.py +++ b/ldclient/streaming.py @@ -1,7 +1,7 @@ """ Default implementation of the streaming component. """ -# currently excluded from documentation - see ldclient.rst +# currently excluded from documentation - see docs/README.md from collections import namedtuple diff --git a/ldclient/user_filter.py b/ldclient/user_filter.py index 6379287e..f7dc7f9d 100644 --- a/ldclient/user_filter.py +++ b/ldclient/user_filter.py @@ -1,7 +1,7 @@ """ Internal helper class for filtering out private attributes. 
""" -# currently excluded from documentation - see ldclient.rst +# currently excluded from documentation - see docs/README.md import jsonpickle import six diff --git a/ldclient/util.py b/ldclient/util.py index 02c84ea0..b1d533a2 100644 --- a/ldclient/util.py +++ b/ldclient/util.py @@ -1,7 +1,7 @@ """ General internal helper functions. """ -# currently excluded from documentation - see ldclient.rst +# currently excluded from documentation - see docs/README.md import certifi import logging From c496c35485b238738731c05161af759998ca7664 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 5 Feb 2019 15:19:08 -0800 Subject: [PATCH 076/190] misc cleanup --- ldclient/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ldclient/__init__.py b/ldclient/__init__.py index 5dfb011f..8624bc10 100644 --- a/ldclient/__init__.py +++ b/ldclient/__init__.py @@ -122,7 +122,7 @@ def get(): __lock.unlock() -# currently hidden from documentation - see api-main.rst +# currently hidden from documentation - see docs/README.md class NullHandler(logging.Handler): """A :class:`logging.Handler` implementation that does nothing. From 68bb4e488ea24051c1a99ce6331028ddb74220d7 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 5 Feb 2019 15:26:49 -0800 Subject: [PATCH 077/190] RTD config fixes --- .readthedocs.yml | 2 +- docs/requirements.txt | 9 +++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/.readthedocs.yml b/.readthedocs.yml index 56781a23..2739d0f6 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -1,7 +1,7 @@ version: 2 python: - version: 3.7 + version: 3.5 install: - requirements: docs/requirements.txt - requirements: requirements.txt diff --git a/docs/requirements.txt b/docs/requirements.txt index ef90363c..5e064a36 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1 +1,10 @@ sphinx<2.0 + +backoff>=1.4.3 +certifi>=2018.4.16 +expiringdict>=1.1.4 +six>=1.10.0 +pyRFC3339>=1.0 +jsonpickle==0.9.3 +semver>=2.7.9 +urllib3>=1.22.0 From 6766920631d3877f24f7703e7de3f774d2519947 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 5 Feb 2019 15:43:10 -0800 Subject: [PATCH 078/190] minor edit --- docs/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/README.md b/docs/README.md index 1eb6a999..fee89947 100644 --- a/docs/README.md +++ b/docs/README.md @@ -14,7 +14,7 @@ Every public class, method, and module should have a docstring. Classes and meth "Public" here means things that we want third-party developers to use. The SDK also contains many modules and classes that are not actually private (i.e. they aren't prefixed with `_`), but are for internal use only and aren't supported for any other use (we would like to reduce the amount of these in future). -To cause a class or method in an existing module to be added to the docs, all that's necessary is to give it a docstring. +To add an undocumented class or method in an existing module to the docs, just give it a docstring. To add a new module to the docs, give it a docstring and then add a link to it in the appropriate `api-*.rst` file, in the same format as the existing links. From 74e82c8de2f6f45d02c472302436f4d57e2e22f4 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 5 Feb 2019 16:40:08 -0800 Subject: [PATCH 079/190] misc. 
doc comment edits --- ldclient/__init__.py | 4 ++-- ldclient/client.py | 6 +----- ldclient/config.py | 11 +++++------ ldclient/flag.py | 2 +- ldclient/flags_state.py | 2 +- 5 files changed, 10 insertions(+), 15 deletions(-) diff --git a/ldclient/__init__.py b/ldclient/__init__.py index 8624bc10..d75b6b61 100644 --- a/ldclient/__init__.py +++ b/ldclient/__init__.py @@ -32,7 +32,7 @@ def set_config(config): re-initialized with the new configuration (this will result in the next call to :func:`ldclient.get()` returning a new client instance). - :param string sdk_key: the new SDK key + :param ldclient.config.Config config: the client configuration """ global __config global __client @@ -93,7 +93,7 @@ def get(): """Returns the shared SDK client instance, using the current global configuration. To use the SDK as a singleton, first make sure you have called :func:`ldclient.set_sdk_key()` or - :func:`ldclient.set_config()` at startup time. Then `get()` will return the same shared + :func:`ldclient.set_config()` at startup time. Then ``get()`` will return the same shared :class:`ldclient.client.LDClient` instance each time. The client will be initialized if it has not been already. diff --git a/ldclient/client.py b/ldclient/client.py index 61be996e..8ae8a5a1 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -71,10 +71,6 @@ class LDClient(object): def __init__(self, sdk_key=None, config=None, start_wait=5): """Constructs a new LDClient instance. - Rather than calling this constructor directly, you can call the `ldclient.set_sdk_key`, - `ldclient.set_config`, and `ldclient.get` functions to configure and use a singleton - client instance. - :param string sdk_key: the SDK key for your LaunchDarkly environment :param ldclient.config.Config config: optional custom configuration :param float start_wait: the number of seconds to wait for a successful connection to LaunchDarkly @@ -203,7 +199,7 @@ def is_offline(self): def is_initialized(self): """Returns true if the client has successfully connected to LaunchDarkly. - :rype: bool + :rtype: bool """ return self.is_offline() or self._config.use_ldd or self._update_processor.initialized() diff --git a/ldclient/config.py b/ldclient/config.py index 2d99c72e..f8ef61d0 100644 --- a/ldclient/config.py +++ b/ldclient/config.py @@ -166,9 +166,8 @@ def copy_with_new_sdk_key(self, new_sdk_key): user_keys_flush_interval=self.__user_keys_flush_interval, inline_users_in_events=self.__inline_users_in_events) + # for internal use only - probably should be part of the client logic def get_default(self, key, default): - """Used internally by the SDK client to get the default value for a flag. - """ return default if key not in self.__defaults else self.__defaults[key] @property @@ -179,22 +178,22 @@ def sdk_key(self): def base_uri(self): return self.__base_uri + # for internal use only - also no longer used, will remove @property def get_latest_flags_uri(self): - """Used internally, deprecated. - - .. 
deprecated:: 5.0.0 - """ return self.__base_uri + GET_LATEST_FEATURES_PATH + # for internal use only - should construct the URL path in the events code, not here @property def events_uri(self): return self.__events_uri + '/bulk' + # for internal use only @property def stream_base_uri(self): return self.__stream_uri + # for internal use only - should construct the URL path in the streaming code, not here @property def stream_uri(self): return self.__stream_uri + STREAM_FLAGS_PATH diff --git a/ldclient/flag.py b/ldclient/flag.py index 83986092..88739ba0 100644 --- a/ldclient/flag.py +++ b/ldclient/flag.py @@ -45,7 +45,7 @@ def variation_index(self): """The index of the returned value within the flag's list of variations, e.g. 0 for the first variation -- or None if the default value was returned. - :rtype: int + :rtype: int or None """ return self.__variation_index diff --git a/ldclient/flags_state.py b/ldclient/flags_state.py index a0ba668b..2f611aa6 100644 --- a/ldclient/flags_state.py +++ b/ldclient/flags_state.py @@ -63,7 +63,7 @@ def get_flag_reason(self, key): :param string key: the feature flag key :return: a dictionary describing the reason; None if reasons were not recorded, or if there was no such flag - :rtype: dict + :rtype: dict or None """ meta = self.__flag_metadata.get(key) return None if meta is None else meta.get('reason') From 9316b0d2068df4d53bb2102abe5efd8f3fe0cccc Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 5 Feb 2019 16:22:17 -0800 Subject: [PATCH 080/190] use RTD theme --- docs/Makefile | 2 +- docs/conf.py | 2 +- docs/requirements.txt | 1 + 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/Makefile b/docs/Makefile index 13edc19b..ebce0c0b 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -13,7 +13,7 @@ help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) install: - pip install sphinx + pip install -r requirements.txt html: install @$(SPHINXBUILD) -M html "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/conf.py b/docs/conf.py index 479f3bc8..f1dc322b 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -83,7 +83,7 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_theme = 'alabaster' +html_theme = 'sphinx_rtd_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. 
For a list of options available for each theme, see the diff --git a/docs/requirements.txt b/docs/requirements.txt index 5e064a36..f6c80357 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,4 +1,5 @@ sphinx<2.0 +sphinx_rtd_theme backoff>=1.4.3 certifi>=2018.4.16 From 338910cb4d5528ea917c40d0ba2286457542b94c Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 11 Feb 2019 12:54:37 -0800 Subject: [PATCH 081/190] remove jsonpickle --- ldclient/event_processor.py | 4 ++-- ldclient/user_filter.py | 1 - requirements.txt | 1 - test-requirements.txt | 1 + testing/test_flags_state.py | 2 ++ 5 files changed, 5 insertions(+), 4 deletions(-) diff --git a/ldclient/event_processor.py b/ldclient/event_processor.py index f7a9178f..30619298 100644 --- a/ldclient/event_processor.py +++ b/ldclient/event_processor.py @@ -6,7 +6,7 @@ from collections import namedtuple from email.utils import parsedate import errno -import jsonpickle +import json from threading import Event, Lock, Thread import six import time @@ -168,7 +168,7 @@ def run(self): def _do_send(self, output_events): # noinspection PyBroadException try: - json_body = jsonpickle.encode(output_events, unpicklable=False) + json_body = json.dumps(output_events) log.debug('Sending events payload: ' + json_body) hdrs = _headers(self._config.sdk_key) hdrs['X-LaunchDarkly-Event-Schema'] = str(__CURRENT_EVENT_SCHEMA__) diff --git a/ldclient/user_filter.py b/ldclient/user_filter.py index f7dc7f9d..fe5baa39 100644 --- a/ldclient/user_filter.py +++ b/ldclient/user_filter.py @@ -3,7 +3,6 @@ """ # currently excluded from documentation - see docs/README.md -import jsonpickle import six diff --git a/requirements.txt b/requirements.txt index f86f3039..2e3cba6f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,6 +3,5 @@ certifi>=2018.4.16 expiringdict>=1.1.4 six>=1.10.0 pyRFC3339>=1.0 -jsonpickle==0.9.3 semver>=2.7.9 urllib3>=1.22.0 diff --git a/test-requirements.txt b/test-requirements.txt index 88cbbc2e..3bc09d90 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -3,6 +3,7 @@ pytest>=2.8 redis>=2.10.5 boto3>=1.9.71 coverage>=4.4 +jsonpickle==0.9.3 pytest-capturelog>=0.7 pytest-cov>=2.4.0 codeclimate-test-reporter>=0.2.1 diff --git a/testing/test_flags_state.py b/testing/test_flags_state.py index 45ea6404..f8e6d464 100644 --- a/testing/test_flags_state.py +++ b/testing/test_flags_state.py @@ -58,6 +58,8 @@ def test_can_convert_to_json_string(): str = state.to_json_string() assert json.loads(str) == obj +# We don't actually use jsonpickle in the SDK, but FeatureFlagsState has a magic method that makes it +# behave correctly in case the application uses jsonpickle to serialize it. def test_can_serialize_with_jsonpickle(): state = FeatureFlagsState(True) flag1 = { 'key': 'key1', 'version': 100, 'offVariation': 0, 'variations': [ 'value1' ], 'trackEvents': False } From f586cd11f3be50ba0f41e547d58b3eb390a3f4fd Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 11 Feb 2019 13:05:59 -0800 Subject: [PATCH 082/190] misc doc comment/readme edits prior to publishing docs --- README.md | 12 ++++++++---- ldclient/client.py | 14 ++++++++++++-- 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index 61e67050..6013179f 100644 --- a/README.md +++ b/README.md @@ -81,20 +81,22 @@ The SDK is tested with the most recent patch releases of Python 2.7, 3.3, 3.4, 3 Database integrations --------------------- -Feature flag data can be kept in a persistent store using Consul, DynamoDB, or Redis. 
These adapters are implemented in the `Consul`, `DynamoDB` and `Redis` classes in `ldclient.integrations`; to use them, call the `new_feature_store` method in the appropriate class, and put the returned object in the `feature_store` property of your client configuration. See [`ldclient.integrations`](https://github.com/launchdarkly/python-client-private/blob/master/ldclient/integrations.py) and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store) for more information. +Feature flag data can be kept in a persistent store using Consul, DynamoDB, or Redis. These adapters are implemented in the `Consul`, `DynamoDB` and `Redis` classes in `ldclient.integrations`; to use them, call the `new_feature_store` method in the appropriate class, and put the returned object in the `feature_store` property of your client configuration. See [`ldclient.integrations`](https://launchdarkly-python-sdk.readthedocs.io/en/latest/api-integrations.html#module-ldclient.integrations) and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store) for more information. Note that Consul is not supported in Python 3.3 or 3.4. Using flag data from a file --------------------------- -For testing purposes, the SDK can be made to read feature flag state from a file or files instead of connecting to LaunchDarkly. See [`file_data_source.py`](https://github.com/launchdarkly/python-client/blob/master/ldclient/file_data_source.py) and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/reading-flags-from-a-file) for more details. +For testing purposes, the SDK can be made to read feature flag state from a file or files instead of connecting to LaunchDarkly. See [`ldclient.integrations.Files`](https://launchdarkly-python-sdk.readthedocs.io/en/latest/api-integrations.html#ldclient.integrations.Files) and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/reading-flags-from-a-file) for more details. Learn more ------------ +---------- Check out our [documentation](http://docs.launchdarkly.com) for in-depth instructions on configuring and using LaunchDarkly. You can also head straight to the [complete reference guide for this SDK](http://docs.launchdarkly.com/docs/python-sdk-reference). +Generated API documentation is on [readthedocs.io](https://launchdarkly-python-sdk.readthedocs.io/en/latest/). + Testing ------- @@ -116,16 +118,18 @@ About LaunchDarkly * Turn off a feature that you realize is causing performance problems in production, without needing to re-deploy, or even restart the application with a changed configuration file. * Grant access to certain features based on user attributes, like payment plan (eg: users on the ‘gold’ plan get access to more features than users in the ‘silver’ plan). Disable parts of your application to facilitate maintenance, without taking everything offline. 
* LaunchDarkly provides feature flag SDKs for - * [Java](http://docs.launchdarkly.com/docs/java-sdk-reference "Java SDK") + * [Java](http://docs.launchdarkly.com/docs/java-sdk-reference "LaunchDarkly Java SDK") * [JavaScript](http://docs.launchdarkly.com/docs/js-sdk-reference "LaunchDarkly JavaScript SDK") * [PHP](http://docs.launchdarkly.com/docs/php-sdk-reference "LaunchDarkly PHP SDK") * [Python](http://docs.launchdarkly.com/docs/python-sdk-reference "LaunchDarkly Python SDK") * [Go](http://docs.launchdarkly.com/docs/go-sdk-reference "LaunchDarkly Go SDK") * [Node.JS](http://docs.launchdarkly.com/docs/node-sdk-reference "LaunchDarkly Node SDK") + * [Electron](http://docs.launchdarkly.com/docs/electron-sdk-reference "LaunchDarkly Electron SDK") * [.NET](http://docs.launchdarkly.com/docs/dotnet-sdk-reference "LaunchDarkly .Net SDK") * [Ruby](http://docs.launchdarkly.com/docs/ruby-sdk-reference "LaunchDarkly Ruby SDK") * [iOS](http://docs.launchdarkly.com/docs/ios-sdk-reference "LaunchDarkly iOS SDK") * [Android](http://docs.launchdarkly.com/docs/android-sdk-reference "LaunchDarkly Android SDK") + * [C/C++](http://docs.launchdarkly.com/docs/c-sdk-reference "LaunchDarkly C/C++ SDK") * Explore LaunchDarkly * [launchdarkly.com](http://www.launchdarkly.com/ "LaunchDarkly Main Website") for more information * [docs.launchdarkly.com](http://docs.launchdarkly.com/ "LaunchDarkly Documentation") for our documentation and SDKs diff --git a/ldclient/client.py b/ldclient/client.py index 8ae8a5a1..d1759f6f 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -166,6 +166,10 @@ def _send_event(self, event): def track(self, event_name, user, data=None): """Tracks that a user performed an event. + LaunchDarkly automatically tracks pageviews and clicks that are specified in the Goals + section of the dashboard. This can be used to track custom goals or other events that do + not currently have goals. + :param string event_name: the name of the event, which may correspond to a goal in A/B tests :param dict user: the attributes of the user :param data: optional additional data associated with the event @@ -199,12 +203,17 @@ def is_offline(self): def is_initialized(self): """Returns true if the client has successfully connected to LaunchDarkly. + If this returns false, it means that the client has not yet successfully connected to LaunchDarkly. + It might still be in the process of starting up, or it might be attempting to reconnect after an + unsuccessful attempt, or it might have received an unrecoverable error (such as an invalid SDK key) + and given up. + :rtype: bool """ return self.is_offline() or self._config.use_ldd or self._update_processor.initialized() def flush(self): - """Flushes all pending events. + """Flushes all pending analytics events. Normally, batches of events are delivered in the background at intervals determined by the ``flush_interval`` property of :class:`ldclient.config.Config`. Calling ``flush()`` @@ -400,7 +409,8 @@ def all_flags_state(self, user, **kwargs): return state def secure_mode_hash(self, user): - """Generates a hash value for a user, for use by the JavaScript SDK. + """Computes an HMAC signature of a user signed with the client's SDK key, + for use with the JavaScript SDK. For more information, see the JavaScript SDK Reference Guide on `Secure mode `_. 
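As an aside on the secure_mode_hash behavior documented in the commit above: the value is an HMAC of the user's key, signed with the server-side SDK key, which the JavaScript SDK then presents back to LaunchDarkly. A minimal sketch of that computation (assuming HMAC-SHA256 with a hex digest; the helper name is hypothetical):

import hashlib
import hmac

def secure_mode_hash_sketch(sdk_key, user):
    # Sign the user key with the server-side SDK key; compare with
    # LDClient.secure_mode_hash() in ldclient/client.py above.
    return hmac.new(sdk_key.encode('utf-8'), str(user.get('key')).encode('utf-8'),
                    hashlib.sha256).hexdigest()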
From 9731f4de40b50ba848256e8d23bab249464edbe2 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 12 Feb 2019 16:05:01 -0800 Subject: [PATCH 083/190] add git placeholders for unused dirs --- docs/_static/.gitkeep | 0 docs/_templates/.gitkeep | 0 2 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 docs/_static/.gitkeep create mode 100644 docs/_templates/.gitkeep diff --git a/docs/_static/.gitkeep b/docs/_static/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/docs/_templates/.gitkeep b/docs/_templates/.gitkeep new file mode 100644 index 00000000..e69de29b From 87336db16da33840820858215949b5b88820c618 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 12 Feb 2019 16:16:35 -0800 Subject: [PATCH 084/190] use default theme --- docs/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/conf.py b/docs/conf.py index f1dc322b..10f481f3 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -83,7 +83,7 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_theme = 'sphinx_rtd_theme' +#html_theme = 'sphinx_rtd_theme' # ReadTheDocs will set this # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the From 2dedbc407db52f99b840a42b89255c5b2e84821b Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 25 Feb 2019 15:33:37 -0800 Subject: [PATCH 085/190] add experimentation event overrides for rules and fallthrough --- ldclient/client.py | 31 ++++++------- ldclient/flag.py | 19 +++----- ldclient/impl/event_factory.py | 71 +++++++++++++++++++++++++++++ testing/test_flag.py | 64 +++++++++++++------------- testing/test_ldclient.py | 69 ++++++++++++++++++++++++++-- testing/test_ldclient_evaluation.py | 2 +- 6 files changed, 190 insertions(+), 66 deletions(-) create mode 100644 ldclient/impl/event_factory.py diff --git a/ldclient/client.py b/ldclient/client.py index d1759f6f..32dae0ae 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -13,6 +13,7 @@ from ldclient.feature_store import _FeatureStoreDataSetSorter from ldclient.flag import EvaluationDetail, evaluate, error_reason from ldclient.flags_state import FeatureFlagsState +from ldclient.impl.event_factory import _EventFactory from ldclient.interfaces import FeatureStore from ldclient.polling import PollingUpdateProcessor from ldclient.streaming import StreamingUpdateProcessor @@ -90,6 +91,8 @@ def __init__(self, sdk_key=None, config=None, start_wait=5): self._event_processor = None self._lock = Lock() + self._event_factory_default = _EventFactory(False) + self._event_factory_with_reasons = _EventFactory(True) self._store = _FeatureStoreClientWrapper(self._config.feature_store) """ :type: FeatureStore """ @@ -241,7 +244,7 @@ def variation(self, key, user, default): available from LaunchDarkly :return: one of the flag's variation values, or the default value """ - return self._evaluate_internal(key, user, default, False).value + return self._evaluate_internal(key, user, default, self._event_factory_default).value def variation_detail(self, key, user, default): """Determines the variation of a feature flag for a user, like :func:`variation()`, but also @@ -258,9 +261,9 @@ def variation_detail(self, key, user, default): :return: an object describing the result :rtype: EvaluationDetail """ - return self._evaluate_internal(key, user, default, True) + return self._evaluate_internal(key, user, default, self._event_factory_with_reasons) - def 
_evaluate_internal(self, key, user, default, include_reasons_in_events): + def _evaluate_internal(self, key, user, default, event_factory): default = self._config.get_default(key, default) if self._config.offline: @@ -269,14 +272,6 @@ def _evaluate_internal(self, key, user, default, include_reasons_in_events): if user is not None: self._sanitize_user(user) - def send_event(value, variation=None, flag=None, reason=None): - self._send_event({'kind': 'feature', 'key': key, 'user': user, - 'value': value, 'variation': variation, 'default': default, - 'version': flag.get('version') if flag else None, - 'trackEvents': flag.get('trackEvents') if flag else None, - 'debugEventsUntilDate': flag.get('debugEventsUntilDate') if flag else None, - 'reason': reason if include_reasons_in_events else None}) - if not self.is_initialized(): if self._store.initialized: log.warn("Feature Flag evaluation attempted before client has initialized - using last known values from feature store for feature key: " + key) @@ -284,7 +279,7 @@ def send_event(value, variation=None, flag=None, reason=None): log.warn("Feature Flag evaluation attempted before client has initialized! Feature store unavailable - returning default: " + str(default) + " for feature key: " + key) reason = error_reason('CLIENT_NOT_READY') - send_event(default, None, None, reason) + self._send_event(event_factory.new_unknown_flag_event(key, user, default, reason)) return EvaluationDetail(default, None, reason) if user is not None and user.get('key', "") == "": @@ -296,32 +291,32 @@ def send_event(value, variation=None, flag=None, reason=None): log.error("Unexpected error while retrieving feature flag \"%s\": %s" % (key, repr(e))) log.debug(traceback.format_exc()) reason = error_reason('EXCEPTION') - send_event(default, None, None, reason) + self._send_event(event_factory.new_unknown_flag_event(key, user, default, reason)) return EvaluationDetail(default, None, reason) if not flag: reason = error_reason('FLAG_NOT_FOUND') - send_event(default, None, None, reason) + self._send_event(event_factory.new_unknown_flag_event(key, user, default, reason)) return EvaluationDetail(default, None, reason) else: if user is None or user.get('key') is None: reason = error_reason('USER_NOT_SPECIFIED') - send_event(default, None, flag, reason) + self._send_event(event_factory.new_default_event(flag, user, default, reason)) return EvaluationDetail(default, None, reason) try: - result = evaluate(flag, user, self._store, include_reasons_in_events) + result = evaluate(flag, user, self._store, event_factory) for event in result.events or []: self._send_event(event) detail = result.detail if detail.is_default_value(): detail = EvaluationDetail(default, None, detail.reason) - send_event(detail.value, detail.variation_index, flag, detail.reason) + self._send_event(event_factory.new_eval_event(flag, user, detail, default)) return detail except Exception as e: log.error("Unexpected error while evaluating feature flag \"%s\": %s" % (key, repr(e))) log.debug(traceback.format_exc()) reason = error_reason('EXCEPTION') - send_event(default, None, flag, reason) + self._send_event(event_factory.new_default_event(flag, user, default, reason)) return EvaluationDetail(default, None, reason) def all_flags(self, user): diff --git a/ldclient/flag.py b/ldclient/flag.py index 88739ba0..65f2812a 100644 --- a/ldclient/flag.py +++ b/ldclient/flag.py @@ -105,16 +105,16 @@ def error_reason(error_kind): return {'kind': 'ERROR', 'errorKind': error_kind} -def evaluate(flag, user, store, 
include_reasons_in_events = False): +def evaluate(flag, user, store, event_factory): prereq_events = [] - detail = _evaluate(flag, user, store, prereq_events, include_reasons_in_events) + detail = _evaluate(flag, user, store, prereq_events, event_factory) return EvalResult(detail = detail, events = prereq_events) -def _evaluate(flag, user, store, prereq_events, include_reasons_in_events): +def _evaluate(flag, user, store, prereq_events, event_factory): if not flag.get('on', False): return _get_off_value(flag, {'kind': 'OFF'}) - prereq_failure_reason = _check_prerequisites(flag, user, store, prereq_events, include_reasons_in_events) + prereq_failure_reason = _check_prerequisites(flag, user, store, prereq_events, event_factory) if prereq_failure_reason is not None: return _get_off_value(flag, prereq_failure_reason) @@ -135,7 +135,7 @@ def _evaluate(flag, user, store, prereq_events, include_reasons_in_events): return _get_value_for_variation_or_rollout(flag, flag['fallthrough'], user, {'kind': 'FALLTHROUGH'}) -def _check_prerequisites(flag, user, store, events, include_reasons_in_events): +def _check_prerequisites(flag, user, store, events, event_factory): failed_prereq = None prereq_res = None for prereq in flag.get('prerequisites') or []: @@ -144,17 +144,12 @@ def _check_prerequisites(flag, user, store, events, include_reasons_in_events): log.warn("Missing prereq flag: " + prereq.get('key')) failed_prereq = prereq else: - prereq_res = _evaluate(prereq_flag, user, store, events, include_reasons_in_events) + prereq_res = _evaluate(prereq_flag, user, store, events, event_factory) # Note that if the prerequisite flag is off, we don't consider it a match no matter what its # off variation was. But we still need to evaluate it in order to generate an event. if (not prereq_flag.get('on', False)) or prereq_res.variation_index != prereq.get('variation'): failed_prereq = prereq - event = {'kind': 'feature', 'key': prereq.get('key'), 'user': user, - 'variation': prereq_res.variation_index, 'value': prereq_res.value, - 'version': prereq_flag.get('version'), 'prereqOf': flag.get('key'), - 'trackEvents': prereq_flag.get('trackEvents'), - 'debugEventsUntilDate': prereq_flag.get('debugEventsUntilDate'), - 'reason': prereq_res.reason if prereq_res and include_reasons_in_events else None} + event = event_factory.new_eval_event(prereq_flag, user, prereq_res, None, flag) events.append(event) if failed_prereq: return {'kind': 'PREREQUISITE_FAILED', 'prerequisiteKey': failed_prereq.get('key')} diff --git a/ldclient/impl/event_factory.py b/ldclient/impl/event_factory.py new file mode 100644 index 00000000..0c6fcc8c --- /dev/null +++ b/ldclient/impl/event_factory.py @@ -0,0 +1,71 @@ + +# Event constructors are centralized here to avoid mistakes and repetitive logic. +# The LDClient owns two instances of _EventFactory: one that always embeds evaluation reasons +# in the events (for when variation_detail is called) and one that doesn't. 
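# Illustrative example (values are hypothetical, for exposition only): given
#     factory = _EventFactory(False)
#     flag = {'key': 'f1', 'version': 7, 'trackEvents': True}
#     detail = EvaluationDetail('on', 0, {'kind': 'FALLTHROUGH'})
# the new_eval_event method below would return
#     {'kind': 'feature', 'key': 'f1', 'user': user, 'value': 'on', 'variation': 0,
#      'default': 'off', 'version': 7, 'trackEvents': True}
# where 'default' echoes the default_value argument ('off' here), and 'reason' is
# included only for a factory built with with_reasons=True or when _is_experiment
# detects a tracked rule or a tracked fallthrough.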
+class _EventFactory(object): + def __init__(self, with_reasons): + self._with_reasons = with_reasons + + def new_eval_event(self, flag, user, detail, default_value, prereq_of_flag = None): + add_experiment_data = self._is_experiment(flag, detail.reason) + e = { + 'kind': 'feature', + 'key': flag.get('key'), + 'user': user, + 'value': detail.value, + 'variation': detail.variation_index, + 'default': default_value, + 'version': flag.get('version') + } + # the following properties are handled separately so we don't waste bandwidth on unused keys + if add_experiment_data or flag.get('trackEvents', False): + e['trackEvents'] = True + if flag.get('debugEventsUntilDate', None): + e['debugEventsUntilDate'] = flag.get('debugEventsUntilDate') + if prereq_of_flag is not None: + e['prereqOf'] = prereq_of_flag.get('key') + if add_experiment_data or self._with_reasons: + e['reason'] = detail.reason + return e + + def new_default_event(self, flag, user, default_value, reason): + add_experiment_data = self._is_experiment(flag, reason) + e = { + 'kind': 'feature', + 'key': flag.get('key'), + 'user': user, + 'value': default_value, + 'default': default_value, + 'version': flag.get('version') + } + # the following properties are handled separately so we don't waste bandwidth on unused keys + if add_experiment_data or flag.get('trackEvents', False): + e['trackEvents'] = True + if flag.get('debugEventsUntilDate', None): + e['debugEventsUntilDate'] = flag.get('debugEventsUntilDate') + if add_experiment_data or self._with_reasons: + e['reason'] = reason + return e + + def new_unknown_flag_event(self, key, user, default_value, reason): + e = { + 'kind': 'feature', + 'key': key, + 'user': user, + 'value': default_value, + 'default': default_value + } + if self._with_reasons: + e['reason'] = reason + return e + + def _is_experiment(self, flag, reason): + if reason is not None: + kind = reason['kind'] + if kind == 'RULE_MATCH': + index = reason['ruleIndex'] + rules = flag.get('rules') or [] + return index >= 0 and index < len(rules) and rules[index].get('trackEvents', False) + elif kind == 'FALLTHROUGH': + return flag.get('trackEventsFallthrough', False) + return False diff --git a/testing/test_flag.py b/testing/test_flag.py index 97f64af0..9ebd56b5 100644 --- a/testing/test_flag.py +++ b/testing/test_flag.py @@ -1,10 +1,12 @@ import pytest from ldclient.feature_store import InMemoryFeatureStore from ldclient.flag import EvaluationDetail, EvalResult, _bucket_user, evaluate +from ldclient.impl.event_factory import _EventFactory from ldclient.versioned_data_kind import FEATURES, SEGMENTS empty_store = InMemoryFeatureStore() +event_factory = _EventFactory(False) def make_boolean_flag_with_rules(rules): @@ -27,7 +29,7 @@ def test_flag_returns_off_variation_if_flag_is_off(): } user = { 'key': 'x' } detail = EvaluationDetail('b', 1, {'kind': 'OFF'}) - assert evaluate(flag, user, empty_store) == EvalResult(detail, []) + assert evaluate(flag, user, empty_store, event_factory) == EvalResult(detail, []) def test_flag_returns_none_if_flag_is_off_and_off_variation_is_unspecified(): flag = { @@ -37,7 +39,7 @@ def test_flag_returns_none_if_flag_is_off_and_off_variation_is_unspecified(): } user = { 'key': 'x' } detail = EvaluationDetail(None, None, {'kind': 'OFF'}) - assert evaluate(flag, user, empty_store) == EvalResult(detail, []) + assert evaluate(flag, user, empty_store, event_factory) == EvalResult(detail, []) def test_flag_returns_error_if_off_variation_is_too_high(): flag = { @@ -48,7 +50,7 @@ def 
test_flag_returns_error_if_off_variation_is_too_high(): } user = { 'key': 'x' } detail = EvaluationDetail(None, None, {'kind': 'ERROR', 'errorKind': 'MALFORMED_FLAG'}) - assert evaluate(flag, user, empty_store) == EvalResult(detail, []) + assert evaluate(flag, user, empty_store, event_factory) == EvalResult(detail, []) def test_flag_returns_error_if_off_variation_is_negative(): flag = { @@ -59,7 +61,7 @@ def test_flag_returns_error_if_off_variation_is_negative(): } user = { 'key': 'x' } detail = EvaluationDetail(None, None, {'kind': 'ERROR', 'errorKind': 'MALFORMED_FLAG'}) - assert evaluate(flag, user, empty_store) == EvalResult(detail, []) + assert evaluate(flag, user, empty_store, event_factory) == EvalResult(detail, []) def test_flag_returns_off_variation_if_prerequisite_not_found(): flag = { @@ -72,7 +74,7 @@ def test_flag_returns_off_variation_if_prerequisite_not_found(): } user = { 'key': 'x' } detail = EvaluationDetail('b', 1, {'kind': 'PREREQUISITE_FAILED', 'prerequisiteKey': 'badfeature'}) - assert evaluate(flag, user, empty_store) == EvalResult(detail, []) + assert evaluate(flag, user, empty_store, event_factory) == EvalResult(detail, []) def test_flag_returns_off_variation_and_event_if_prerequisite_is_off(): store = InMemoryFeatureStore() @@ -98,9 +100,9 @@ def test_flag_returns_off_variation_and_event_if_prerequisite_is_off(): store.upsert(FEATURES, flag1) user = { 'key': 'x' } detail = EvaluationDetail('b', 1, {'kind': 'PREREQUISITE_FAILED', 'prerequisiteKey': 'feature1'}) - events_should_be = [{'kind': 'feature', 'key': 'feature1', 'variation': 1, 'value': 'e', - 'version': 2, 'user': user, 'prereqOf': 'feature0', 'trackEvents': False, 'debugEventsUntilDate': None, 'reason': None}] - assert evaluate(flag, user, store) == EvalResult(detail, events_should_be) + events_should_be = [{'kind': 'feature', 'key': 'feature1', 'variation': 1, 'value': 'e', 'default': None, + 'version': 2, 'user': user, 'prereqOf': 'feature0'}] + assert evaluate(flag, user, store, event_factory) == EvalResult(detail, events_should_be) def test_flag_returns_off_variation_and_event_if_prerequisite_is_not_met(): store = InMemoryFeatureStore() @@ -124,9 +126,9 @@ def test_flag_returns_off_variation_and_event_if_prerequisite_is_not_met(): store.upsert(FEATURES, flag1) user = { 'key': 'x' } detail = EvaluationDetail('b', 1, {'kind': 'PREREQUISITE_FAILED', 'prerequisiteKey': 'feature1'}) - events_should_be = [{'kind': 'feature', 'key': 'feature1', 'variation': 0, 'value': 'd', - 'version': 2, 'user': user, 'prereqOf': 'feature0', 'trackEvents': False, 'debugEventsUntilDate': None, 'reason': None}] - assert evaluate(flag, user, store) == EvalResult(detail, events_should_be) + events_should_be = [{'kind': 'feature', 'key': 'feature1', 'variation': 0, 'value': 'd', 'default': None, + 'version': 2, 'user': user, 'prereqOf': 'feature0'}] + assert evaluate(flag, user, store, event_factory) == EvalResult(detail, events_should_be) def test_flag_returns_fallthrough_and_event_if_prereq_is_met_and_there_are_no_rules(): store = InMemoryFeatureStore() @@ -150,9 +152,9 @@ def test_flag_returns_fallthrough_and_event_if_prereq_is_met_and_there_are_no_ru store.upsert(FEATURES, flag1) user = { 'key': 'x' } detail = EvaluationDetail('a', 0, {'kind': 'FALLTHROUGH'}) - events_should_be = [{'kind': 'feature', 'key': 'feature1', 'variation': 1, 'value': 'e', - 'version': 2, 'user': user, 'prereqOf': 'feature0', 'trackEvents': False, 'debugEventsUntilDate': None, 'reason': None}] - assert evaluate(flag, user, store) == 
EvalResult(detail, events_should_be) + events_should_be = [{'kind': 'feature', 'key': 'feature1', 'variation': 1, 'value': 'e', 'default': None, + 'version': 2, 'user': user, 'prereqOf': 'feature0'}] + assert evaluate(flag, user, store, event_factory) == EvalResult(detail, events_should_be) def test_flag_returns_error_if_fallthrough_variation_is_too_high(): flag = { @@ -163,7 +165,7 @@ def test_flag_returns_error_if_fallthrough_variation_is_too_high(): } user = { 'key': 'x' } detail = EvaluationDetail(None, None, {'kind': 'ERROR', 'errorKind': 'MALFORMED_FLAG'}) - assert evaluate(flag, user, empty_store) == EvalResult(detail, []) + assert evaluate(flag, user, empty_store, event_factory) == EvalResult(detail, []) def test_flag_returns_error_if_fallthrough_variation_is_negative(): flag = { @@ -174,7 +176,7 @@ def test_flag_returns_error_if_fallthrough_variation_is_negative(): } user = { 'key': 'x' } detail = EvaluationDetail(None, None, {'kind': 'ERROR', 'errorKind': 'MALFORMED_FLAG'}) - assert evaluate(flag, user, empty_store) == EvalResult(detail, []) + assert evaluate(flag, user, empty_store, event_factory) == EvalResult(detail, []) def test_flag_returns_error_if_fallthrough_has_no_variation_or_rollout(): flag = { @@ -185,7 +187,7 @@ def test_flag_returns_error_if_fallthrough_has_no_variation_or_rollout(): } user = { 'key': 'x' } detail = EvaluationDetail(None, None, {'kind': 'ERROR', 'errorKind': 'MALFORMED_FLAG'}) - assert evaluate(flag, user, empty_store) == EvalResult(detail, []) + assert evaluate(flag, user, empty_store, event_factory) == EvalResult(detail, []) def test_flag_returns_error_if_fallthrough_has_rollout_with_no_variations(): flag = { @@ -197,7 +199,7 @@ def test_flag_returns_error_if_fallthrough_has_rollout_with_no_variations(): } user = { 'key': 'x' } detail = EvaluationDetail(None, None, {'kind': 'ERROR', 'errorKind': 'MALFORMED_FLAG'}) - assert evaluate(flag, user, empty_store) == EvalResult(detail, []) + assert evaluate(flag, user, empty_store, event_factory) == EvalResult(detail, []) def test_flag_matches_user_from_targets(): flag = { @@ -210,35 +212,35 @@ def test_flag_matches_user_from_targets(): } user = { 'key': 'userkey' } detail = EvaluationDetail('c', 2, {'kind': 'TARGET_MATCH'}) - assert evaluate(flag, user, empty_store) == EvalResult(detail, []) + assert evaluate(flag, user, empty_store, event_factory) == EvalResult(detail, []) def test_flag_matches_user_from_rules(): rule = { 'id': 'id', 'clauses': [{'attribute': 'key', 'op': 'in', 'values': ['userkey']}], 'variation': 1} flag = make_boolean_flag_with_rules([rule]) user = { 'key': 'userkey' } detail = EvaluationDetail(True, 1, {'kind': 'RULE_MATCH', 'ruleIndex': 0, 'ruleId': 'id'}) - assert evaluate(flag, user, empty_store) == EvalResult(detail, []) + assert evaluate(flag, user, empty_store, event_factory) == EvalResult(detail, []) def test_flag_returns_error_if_rule_variation_is_too_high(): rule = { 'id': 'id', 'clauses': [{'attribute': 'key', 'op': 'in', 'values': ['userkey']}], 'variation': 999} flag = make_boolean_flag_with_rules([rule]) user = { 'key': 'userkey' } detail = EvaluationDetail(None, None, {'kind': 'ERROR', 'errorKind': 'MALFORMED_FLAG'}) - assert evaluate(flag, user, empty_store) == EvalResult(detail, []) + assert evaluate(flag, user, empty_store, event_factory) == EvalResult(detail, []) def test_flag_returns_error_if_rule_variation_is_negative(): rule = { 'id': 'id', 'clauses': [{'attribute': 'key', 'op': 'in', 'values': ['userkey']}], 'variation': -1} flag = 
make_boolean_flag_with_rules([rule]) user = { 'key': 'userkey' } detail = EvaluationDetail(None, None, {'kind': 'ERROR', 'errorKind': 'MALFORMED_FLAG'}) - assert evaluate(flag, user, empty_store) == EvalResult(detail, []) + assert evaluate(flag, user, empty_store, event_factory) == EvalResult(detail, []) def test_flag_returns_error_if_rule_has_no_variation_or_rollout(): rule = { 'id': 'id', 'clauses': [{'attribute': 'key', 'op': 'in', 'values': ['userkey']}]} flag = make_boolean_flag_with_rules([rule]) user = { 'key': 'userkey' } detail = EvaluationDetail(None, None, {'kind': 'ERROR', 'errorKind': 'MALFORMED_FLAG'}) - assert evaluate(flag, user, empty_store) == EvalResult(detail, []) + assert evaluate(flag, user, empty_store, event_factory) == EvalResult(detail, []) def test_flag_returns_error_if_rule_has_rollout_with_no_variations(): rule = { 'id': 'id', 'clauses': [{'attribute': 'key', 'op': 'in', 'values': ['userkey']}], @@ -246,7 +248,7 @@ def test_flag_returns_error_if_rule_has_rollout_with_no_variations(): flag = make_boolean_flag_with_rules([rule]) user = { 'key': 'userkey' } detail = EvaluationDetail(None, None, {'kind': 'ERROR', 'errorKind': 'MALFORMED_FLAG'}) - assert evaluate(flag, user, empty_store) == EvalResult(detail, []) + assert evaluate(flag, user, empty_store, event_factory) == EvalResult(detail, []) def test_segment_match_clause_retrieves_segment_from_store(): store = InMemoryFeatureStore() @@ -277,7 +279,7 @@ def test_segment_match_clause_retrieves_segment_from_store(): ] } - assert evaluate(flag, user, store).detail.value == True + assert evaluate(flag, user, store, event_factory).detail.value == True def test_segment_match_clause_falls_through_with_no_errors_if_segment_not_found(): user = { "key": "foo" } @@ -300,7 +302,7 @@ def test_segment_match_clause_falls_through_with_no_errors_if_segment_not_found( ] } - assert evaluate(flag, user, empty_store).detail.value == False + assert evaluate(flag, user, empty_store, event_factory).detail.value == False def test_clause_matches_builtin_attribute(): clause = { @@ -310,7 +312,7 @@ def test_clause_matches_builtin_attribute(): } user = { 'key': 'x', 'name': 'Bob' } flag = _make_bool_flag_from_clause(clause) - assert evaluate(flag, user, empty_store).detail.value == True + assert evaluate(flag, user, empty_store, event_factory).detail.value == True def test_clause_matches_custom_attribute(): clause = { @@ -320,7 +322,7 @@ def test_clause_matches_custom_attribute(): } user = { 'key': 'x', 'name': 'Bob', 'custom': { 'legs': 4 } } flag = _make_bool_flag_from_clause(clause) - assert evaluate(flag, user, empty_store).detail.value == True + assert evaluate(flag, user, empty_store, event_factory).detail.value == True def test_clause_returns_false_for_missing_attribute(): clause = { @@ -330,7 +332,7 @@ def test_clause_returns_false_for_missing_attribute(): } user = { 'key': 'x', 'name': 'Bob' } flag = _make_bool_flag_from_clause(clause) - assert evaluate(flag, user, empty_store).detail.value == False + assert evaluate(flag, user, empty_store, event_factory).detail.value == False def test_clause_can_be_negated(): clause = { @@ -341,7 +343,7 @@ def test_clause_can_be_negated(): } user = { 'key': 'x', 'name': 'Bob' } flag = _make_bool_flag_from_clause(clause) - assert evaluate(flag, user, empty_store).detail.value == False + assert evaluate(flag, user, empty_store, event_factory).detail.value == False def _make_bool_flag_from_clause(clause): @@ -374,7 +376,6 @@ def test_bucket_by_user_key(): assert bucket == pytest.approx(0.10343106) 
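# For context on the bucketing tests here, a minimal sketch of the hashing scheme
# that _bucket_user implements (an approximation: it assumes the SHA-1 scheme and
# the float(0xFFFFFFFFFFFFFFF) scale used in ldclient/flag.py, and omits details
# such as the 'secondary' key suffix):
import hashlib

def _bucket_user_sketch(user_value, flag_key, salt):
    # Hash "flagKey.salt.userValue" and scale the first 15 hex digits into [0, 1).
    id_hash = '%s.%s.%s' % (flag_key, salt, user_value)
    hash_val = int(hashlib.sha1(id_hash.encode('utf-8')).hexdigest()[:15], 16)
    return hash_val / float(0xFFFFFFFFFFFFFFF)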
def test_bucket_by_int_attr(): - feature = { u'key': u'hashKey', u'salt': u'saltyA' } user = { u'key': u'userKey', u'custom': { @@ -388,7 +389,6 @@ def test_bucket_by_int_attr(): assert bucket2 == bucket def test_bucket_by_float_attr_not_allowed(): - feature = { u'key': u'hashKey', u'salt': u'saltyA' } user = { u'key': u'userKey', u'custom': { diff --git a/testing/test_ldclient.py b/testing/test_ldclient.py index 0e6c33a2..900d5947 100644 --- a/testing/test_ldclient.py +++ b/testing/test_ldclient.py @@ -200,6 +200,69 @@ def test_event_for_existing_feature_with_reason(): e['debugEventsUntilDate'] == 1000) +def test_event_for_existing_feature_with_tracked_rule(): + feature = { + 'key': 'feature.key', + 'version': 100, + 'salt': u'', + 'on': True, + 'rules': [ + { + 'clauses': [ + { 'attribute': 'key', 'op': 'in', 'values': [ user['key'] ] } + ], + 'variation': 0, + 'trackEvents': True, + 'id': 'rule_id' + } + ], + 'variations': [ 'value' ] + } + store = InMemoryFeatureStore() + store.init({FEATURES: {feature['key']: feature}}) + client = make_client(store) + assert 'value' == client.variation(feature['key'], user, default='default') + e = get_first_event(client) + assert (e['kind'] == 'feature' and + e['key'] == feature['key'] and + e['user'] == user and + e['version'] == feature['version'] and + e['value'] == 'value' and + e['variation'] == 0 and + e['reason'] == { 'kind': 'RULE_MATCH', 'ruleIndex': 0, 'ruleId': 'rule_id' } and + e['default'] == 'default' and + e['trackEvents'] == True and + e.get('debugEventsUntilDate') is None) + + +def test_event_for_existing_feature_with_tracked_fallthrough(): + feature = { + 'key': 'feature.key', + 'version': 100, + 'salt': u'', + 'on': True, + 'rules': [], + 'fallthrough': { 'variation': 0 }, + 'variations': [ 'value' ], + 'trackEventsFallthrough': True + } + store = InMemoryFeatureStore() + store.init({FEATURES: {feature['key']: feature}}) + client = make_client(store) + assert 'value' == client.variation(feature['key'], user, default='default') + e = get_first_event(client) + assert (e['kind'] == 'feature' and + e['key'] == feature['key'] and + e['user'] == user and + e['version'] == feature['version'] and + e['value'] == 'value' and + e['variation'] == 0 and + e['reason'] == { 'kind': 'FALLTHROUGH' } and + e['default'] == 'default' and + e['trackEvents'] == True and + e.get('debugEventsUntilDate') is None) + + def test_event_for_unknown_feature(): store = InMemoryFeatureStore() store.init({FEATURES: {}}) @@ -210,7 +273,7 @@ def test_event_for_unknown_feature(): e['key'] == 'feature.key' and e['user'] == user and e['value'] == 'default' and - e['variation'] == None and + e.get('variation') is None and e['default'] == 'default') @@ -228,7 +291,7 @@ def test_event_for_existing_feature_with_no_user(): e['user'] == None and e['version'] == feature['version'] and e['value'] == 'default' and - e['variation'] == None and + e.get('variation') is None and e['default'] == 'default' and e['trackEvents'] == True and e['debugEventsUntilDate'] == 1000) @@ -249,7 +312,7 @@ def test_event_for_existing_feature_with_no_user_key(): e['user'] == bad_user and e['version'] == feature['version'] and e['value'] == 'default' and - e['variation'] == None and + e.get('variation') is None and e['default'] == 'default' and e['trackEvents'] == True and e['debugEventsUntilDate'] == 1000) diff --git a/testing/test_ldclient_evaluation.py b/testing/test_ldclient_evaluation.py index be925a5c..f716c5de 100644 --- a/testing/test_ldclient_evaluation.py +++ 
b/testing/test_ldclient_evaluation.py @@ -123,7 +123,7 @@ def test_variation_detail_when_user_is_none(): expected = EvaluationDetail('default', None, {'kind': 'ERROR', 'errorKind': 'USER_NOT_SPECIFIED'}) assert expected == client.variation_detail('feature.key', None, default='default') -def test_variation_when_user_has_no_key(): +def test_variation_detail_when_user_has_no_key(): feature = make_off_flag_with_value('feature.key', 'value') store = InMemoryFeatureStore() store.init({FEATURES: {'feature.key': feature}}) From 6846ba16dbad54ebdbe51039ce0d2e69005bf101 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 25 Feb 2019 15:44:01 -0800 Subject: [PATCH 086/190] a little more test coverage --- testing/test_ldclient.py | 63 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 63 insertions(+) diff --git a/testing/test_ldclient.py b/testing/test_ldclient.py index 900d5947..1293d19a 100644 --- a/testing/test_ldclient.py +++ b/testing/test_ldclient.py @@ -235,6 +235,41 @@ def test_event_for_existing_feature_with_tracked_rule(): e.get('debugEventsUntilDate') is None) +def test_event_for_existing_feature_with_untracked_rule(): + feature = { + 'key': 'feature.key', + 'version': 100, + 'salt': u'', + 'on': True, + 'rules': [ + { + 'clauses': [ + { 'attribute': 'key', 'op': 'in', 'values': [ user['key'] ] } + ], + 'variation': 0, + 'trackEvents': False, + 'id': 'rule_id' + } + ], + 'variations': [ 'value' ] + } + store = InMemoryFeatureStore() + store.init({FEATURES: {feature['key']: feature}}) + client = make_client(store) + assert 'value' == client.variation(feature['key'], user, default='default') + e = get_first_event(client) + assert (e['kind'] == 'feature' and + e['key'] == feature['key'] and + e['user'] == user and + e['version'] == feature['version'] and + e['value'] == 'value' and + e['variation'] == 0 and + e.get('reason') is None and + e['default'] == 'default' and + e.get('trackEvents', False) == False and + e.get('debugEventsUntilDate') is None) + + def test_event_for_existing_feature_with_tracked_fallthrough(): feature = { 'key': 'feature.key', @@ -263,6 +298,34 @@ def test_event_for_existing_feature_with_tracked_fallthrough(): e.get('debugEventsUntilDate') is None) +def test_event_for_existing_feature_with_untracked_fallthrough(): + feature = { + 'key': 'feature.key', + 'version': 100, + 'salt': u'', + 'on': True, + 'rules': [], + 'fallthrough': { 'variation': 0 }, + 'variations': [ 'value' ], + 'trackEventsFallthrough': False + } + store = InMemoryFeatureStore() + store.init({FEATURES: {feature['key']: feature}}) + client = make_client(store) + assert 'value' == client.variation(feature['key'], user, default='default') + e = get_first_event(client) + assert (e['kind'] == 'feature' and + e['key'] == feature['key'] and + e['user'] == user and + e['version'] == feature['version'] and + e['value'] == 'value' and + e['variation'] == 0 and + e.get('reason') is None and + e['default'] == 'default' and + e.get('trackEvents', False) == False and + e.get('debugEventsUntilDate') is None) + + def test_event_for_unknown_feature(): store = InMemoryFeatureStore() store.init({FEATURES: {}}) From c514216e4c97ce19fe38422dc302448ad1b3d7b6 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 25 Feb 2019 16:14:20 -0800 Subject: [PATCH 087/190] rm unnecessary logic --- ldclient/impl/event_factory.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/ldclient/impl/event_factory.py b/ldclient/impl/event_factory.py index 0c6fcc8c..f738fd69 100644 --- a/ldclient/impl/event_factory.py 
+++ b/ldclient/impl/event_factory.py @@ -29,7 +29,6 @@ def new_eval_event(self, flag, user, detail, default_value, prereq_of_flag = Non return e def new_default_event(self, flag, user, default_value, reason): - add_experiment_data = self._is_experiment(flag, reason) e = { 'kind': 'feature', 'key': flag.get('key'), @@ -39,11 +38,11 @@ def new_default_event(self, flag, user, default_value, reason): 'version': flag.get('version') } # the following properties are handled separately so we don't waste bandwidth on unused keys - if add_experiment_data or flag.get('trackEvents', False): + if flag.get('trackEvents', False): e['trackEvents'] = True if flag.get('debugEventsUntilDate', None): e['debugEventsUntilDate'] = flag.get('debugEventsUntilDate') - if add_experiment_data or self._with_reasons: + if self._with_reasons: e['reason'] = reason return e From afab05deea1575064c637b37e004d5bb555a1c97 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 26 Feb 2019 15:14:04 -0800 Subject: [PATCH 088/190] more factory methods --- ldclient/client.py | 4 ++-- ldclient/impl/event_factory.py | 19 +++++++++++++++++++ 2 files changed, 21 insertions(+), 2 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index 32dae0ae..cff6f1d6 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -180,7 +180,7 @@ def track(self, event_name, user, data=None): self._sanitize_user(user) if user is None or user.get('key') is None: log.warn("Missing user or user key when calling track().") - self._send_event({'kind': 'custom', 'key': event_name, 'user': user, 'data': data}) + self._send_event(self._event_factory_default.new_custom_event(event_name, user, data)) def identify(self, user): """Registers the user. @@ -194,7 +194,7 @@ def identify(self, user): self._sanitize_user(user) if user is None or user.get('key') is None: log.warn("Missing user or user key when calling identify().") - self._send_event({'kind': 'identify', 'key': user.get('key'), 'user': user}) + self._send_event(self._event_factory_default.new_identify_event(user)) def is_offline(self): """Returns true if the client is in offline mode. diff --git a/ldclient/impl/event_factory.py b/ldclient/impl/event_factory.py index f738fd69..7b8b725f 100644 --- a/ldclient/impl/event_factory.py +++ b/ldclient/impl/event_factory.py @@ -2,6 +2,10 @@ # Event constructors are centralized here to avoid mistakes and repetitive logic. # The LDClient owns two instances of _EventFactory: one that always embeds evaluation reasons # in the events (for when variation_detail is called) and one that doesn't. +# +# Note that none of these methods fill in the "creationDate" property, because in the Python +# client, that is done by DefaultEventProcessor.send_event(). 
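# Illustration (hypothetical values, not from the patch): with user = {'key': 'u'},
# the factory methods added below produce
#     new_identify_event(user)
#         -> {'kind': 'identify', 'key': 'u', 'user': {'key': 'u'}}
#     new_custom_event('search', user, {'term': 'shoes'})
#         -> {'kind': 'custom', 'key': 'search', 'user': {'key': 'u'}, 'data': {'term': 'shoes'}}
# with 'creationDate' stamped later by the event processor, per the note above.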
+ class _EventFactory(object): def __init__(self, with_reasons): self._with_reasons = with_reasons @@ -58,6 +62,21 @@ def new_unknown_flag_event(self, key, user, default_value, reason): e['reason'] = reason return e + def new_identify_event(self, user): + return { + 'kind': 'identify', + 'key': user.get('key'), + 'user': user + } + + def new_custom_event(self, event_name, user, data): + return { + 'kind': 'custom', + 'key': event_name, + 'user': user, + 'data': data + } + def _is_experiment(self, flag, reason): if reason is not None: kind = reason['kind'] From 84198a3b03b15c49bfaa0fb1604d3702e71422a8 Mon Sep 17 00:00:00 2001 From: Harpo Roeder Date: Fri, 1 Mar 2019 23:16:23 +0000 Subject: [PATCH 089/190] try python -m instead of pytest directly --- azure-pipelines.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index b7f19ff3..a09727ec 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -49,4 +49,4 @@ jobs: pip install -r consul-requirements.txt python setup.py install mkdir test-reports - pytest -s --junitxml=test-reports/junit.xml testing; + python -m pytest -s --junitxml=test-reports/junit.xml testing; From 80411dd8054bbcdf7e1aa6eb15e346f9988e58b7 Mon Sep 17 00:00:00 2001 From: Harpo Roeder Date: Fri, 1 Mar 2019 23:23:49 +0000 Subject: [PATCH 090/190] add setuptools --- azure-pipelines.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index a09727ec..c3d5980f 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -45,6 +45,7 @@ jobs: workingDirectory: $(System.DefaultWorkingDirectory) script: | python --version + pip install setuptools pip install -r test-requirements.txt pip install -r consul-requirements.txt python setup.py install From 52c0a195337d009502783dd11c0796436231017f Mon Sep 17 00:00:00 2001 From: Harpo Roeder Date: Fri, 1 Mar 2019 23:28:29 +0000 Subject: [PATCH 091/190] use python -m for all of pip --- azure-pipelines.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index c3d5980f..68418351 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -45,9 +45,9 @@ jobs: workingDirectory: $(System.DefaultWorkingDirectory) script: | python --version - pip install setuptools - pip install -r test-requirements.txt - pip install -r consul-requirements.txt + python -m pip install setuptools + python -m pip install -r test-requirements.txt + python -m pip install -r consul-requirements.txt python setup.py install mkdir test-reports python -m pytest -s --junitxml=test-reports/junit.xml testing; From 5bdea5f7dd7d3210eaca482e16fce3e857044e5f Mon Sep 17 00:00:00 2001 From: Harpo Roeder Date: Fri, 1 Mar 2019 23:35:48 +0000 Subject: [PATCH 092/190] add UsePythonVersion task --- azure-pipelines.yml | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 68418351..27ab27d2 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -38,6 +38,10 @@ jobs: cd redis ./redis-server --service-install ./redis-server --service-start + - task: UsePythonVersion@0 + inputs: + versionSpec: '2.7' + addToPath: true - task: PowerShell@2 displayName: 'Setup SDK and Test' inputs: @@ -45,9 +49,9 @@ jobs: workingDirectory: $(System.DefaultWorkingDirectory) script: | python --version - python -m pip install setuptools - python -m pip install -r test-requirements.txt - python -m pip install -r consul-requirements.txt + pip install setuptools + 
pip install -r test-requirements.txt + pip install -r consul-requirements.txt python setup.py install mkdir test-reports python -m pytest -s --junitxml=test-reports/junit.xml testing; From 60a66a8c1ad1ffbc7b5c6f52f08edd15bc5cb5ce Mon Sep 17 00:00:00 2001 From: Harpo Roeder Date: Fri, 1 Mar 2019 23:38:56 +0000 Subject: [PATCH 093/190] fix indent --- azure-pipelines.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 27ab27d2..f27c0d01 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -40,8 +40,8 @@ jobs: ./redis-server --service-start - task: UsePythonVersion@0 inputs: - versionSpec: '2.7' - addToPath: true + versionSpec: '2.7' + addToPath: true - task: PowerShell@2 displayName: 'Setup SDK and Test' inputs: From 1907d75eb9da888ca06e5a3af6984cebe6b0490d Mon Sep 17 00:00:00 2001 From: Harpo Roeder Date: Fri, 1 Mar 2019 23:42:52 +0000 Subject: [PATCH 094/190] remove manually adding setuptools --- azure-pipelines.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index f27c0d01..be768073 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -49,7 +49,6 @@ jobs: workingDirectory: $(System.DefaultWorkingDirectory) script: | python --version - pip install setuptools pip install -r test-requirements.txt pip install -r consul-requirements.txt python setup.py install From 7cdf9fcf517f48f88c5adbc72879f4b4513f3c53 Mon Sep 17 00:00:00 2001 From: Harpo Roeder Date: Sat, 2 Mar 2019 00:07:07 +0000 Subject: [PATCH 095/190] add on 3.7 stages --- azure-pipelines.yml | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index be768073..a1f89bf3 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -43,7 +43,7 @@ jobs: versionSpec: '2.7' addToPath: true - task: PowerShell@2 - displayName: 'Setup SDK and Test' + displayName: 'Setup SDK and Test 2.7' inputs: targetType: inline workingDirectory: $(System.DefaultWorkingDirectory) @@ -53,4 +53,20 @@ jobs: pip install -r consul-requirements.txt python setup.py install mkdir test-reports - python -m pytest -s --junitxml=test-reports/junit.xml testing; + python -m pytest -s --junitxml=test-reports27/junit.xml testing; + - task: UsePythonVersion@0 + inputs: + versionSpec: '3.7' + addToPath: true + - task: PowerShell@2 + displayName: 'Setup SDK and Test 3.7' + inputs: + targetType: inline + workingDirectory: $(System.DefaultWorkingDirectory) + script: | + python --version + pip install -r test-requirements.txt + pip install -r consul-requirements.txt + python setup.py install + mkdir test-reports + python -m pytest -s --junitxml=test-reports37/junit.xml testing; From 1023d45a0631699698b6f240c45d1105be52f448 Mon Sep 17 00:00:00 2001 From: Harpo Roeder Date: Sat, 2 Mar 2019 00:12:22 +0000 Subject: [PATCH 096/190] fix mkdir for reports --- azure-pipelines.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index a1f89bf3..126e5bf3 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -52,7 +52,7 @@ jobs: pip install -r test-requirements.txt pip install -r consul-requirements.txt python setup.py install - mkdir test-reports + mkdir test-reports27 python -m pytest -s --junitxml=test-reports27/junit.xml testing; - task: UsePythonVersion@0 inputs: @@ -68,5 +68,5 @@ jobs: pip install -r test-requirements.txt pip install -r consul-requirements.txt python setup.py install - mkdir test-reports + 
mkdir test-reports37 python -m pytest -s --junitxml=test-reports37/junit.xml testing; From b9778b68b74d486e37d1770d1cfe02853c2d1fc5 Mon Sep 17 00:00:00 2001 From: Harpo Roeder Date: Sat, 2 Mar 2019 00:17:37 +0000 Subject: [PATCH 097/190] upload test artifacts --- azure-pipelines.yml | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 126e5bf3..af1f3342 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -70,3 +70,15 @@ jobs: python setup.py install mkdir test-reports37 python -m pytest -s --junitxml=test-reports37/junit.xml testing; + - task: CopyFiles@2 + inputs: + targetFolder: $(Build.ArtifactStagingDirectory)/test-reports27 + sourceFolder: $(System.DefaultWorkingDirectory)/test-reports27 + - task: CopyFiles@2 + inputs: + targetFolder: $(Build.ArtifactStagingDirectory)/test-reports37 + sourceFolder: $(System.DefaultWorkingDirectory)/test-reports37 + - task: PublishBuildArtifacts@1 + inputs: + pathtoPublish: '$(Build.ArtifactStagingDirectory)' + artifactName: reports From e5d5e4135bbc525e1996e6af81363e0f5cd7ecd1 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 26 Mar 2019 11:18:15 -0700 Subject: [PATCH 098/190] skip trying to load pyyaml in Python 3.3 --- .circleci/config.yml | 3 +++ test-requirements.txt | 1 - test-yaml-requirements.txt | 1 + testing/test_file_data_source.py | 9 +++++++++ 4 files changed, 13 insertions(+), 1 deletion(-) create mode 100644 test-yaml-requirements.txt diff --git a/.circleci/config.yml b/.circleci/config.yml index 603bbf54..a0d2c45f 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -17,6 +17,9 @@ test-template: &test-template command: | sudo pip install --upgrade pip virtualenv; sudo pip install -r test-requirements.txt; + if [[ "$CIRCLE_JOB" != "test-3.3" ]]; then + sudo pip install -r test-yaml-requirements.txt; + fi; if [[ "$CIRCLE_JOB" != "test-3.3" ]] && [[ "$CIRCLE_JOB" != "test-3.4" ]]; then sudo pip install -r consul-requirements.txt; fi; diff --git a/test-requirements.txt b/test-requirements.txt index 3bc09d90..ccde3818 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -7,5 +7,4 @@ jsonpickle==0.9.3 pytest-capturelog>=0.7 pytest-cov>=2.4.0 codeclimate-test-reporter>=0.2.1 -pyyaml>=3.0 watchdog>=0.9 diff --git a/test-yaml-requirements.txt b/test-yaml-requirements.txt new file mode 100644 index 00000000..fb5e7f76 --- /dev/null +++ b/test-yaml-requirements.txt @@ -0,0 +1 @@ +pyyaml>=3.0 diff --git a/testing/test_file_data_source.py b/testing/test_file_data_source.py index 2e232ec8..277c9312 100644 --- a/testing/test_file_data_source.py +++ b/testing/test_file_data_source.py @@ -12,6 +12,13 @@ from ldclient.integrations import Files from ldclient.versioned_data_kind import FEATURES, SEGMENTS +have_yaml = False +try: + import yaml + have_yaml = True +except ImportError: + pass + all_flag_keys = [ 'flag1', 'flag2' ] all_properties_json = ''' @@ -128,6 +135,8 @@ def test_loads_flags_on_start_from_json(): os.remove(path) def test_loads_flags_on_start_from_yaml(): + if not have_yaml: + return path = make_temp_file(all_properties_yaml) try: source = make_data_source(paths = path) From fd883cdeef56ae05e1958392d2da0da2cae3ca28 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 26 Mar 2019 12:24:03 -0700 Subject: [PATCH 099/190] can't use watchdog in Python 3.3 --- .circleci/config.yml | 2 +- test-filesource-optional-requirements.txt | 2 ++ test-requirements.txt | 1 - test-yaml-requirements.txt | 1 - 4 files changed, 3 insertions(+), 3 deletions(-) create 
mode 100644 test-filesource-optional-requirements.txt delete mode 100644 test-yaml-requirements.txt diff --git a/.circleci/config.yml b/.circleci/config.yml index a0d2c45f..46e2166e 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -18,7 +18,7 @@ test-template: &test-template sudo pip install --upgrade pip virtualenv; sudo pip install -r test-requirements.txt; if [[ "$CIRCLE_JOB" != "test-3.3" ]]; then - sudo pip install -r test-yaml-requirements.txt; + sudo pip install -r test-filesource-optional-requirements.txt; fi; if [[ "$CIRCLE_JOB" != "test-3.3" ]] && [[ "$CIRCLE_JOB" != "test-3.4" ]]; then sudo pip install -r consul-requirements.txt; diff --git a/test-filesource-optional-requirements.txt b/test-filesource-optional-requirements.txt new file mode 100644 index 00000000..e0a0e284 --- /dev/null +++ b/test-filesource-optional-requirements.txt @@ -0,0 +1,2 @@ +pyyaml>=3.0 +watchdog>=0.9 diff --git a/test-requirements.txt b/test-requirements.txt index ccde3818..bc5b43f2 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -7,4 +7,3 @@ jsonpickle==0.9.3 pytest-capturelog>=0.7 pytest-cov>=2.4.0 codeclimate-test-reporter>=0.2.1 -watchdog>=0.9 diff --git a/test-yaml-requirements.txt b/test-yaml-requirements.txt deleted file mode 100644 index fb5e7f76..00000000 --- a/test-yaml-requirements.txt +++ /dev/null @@ -1 +0,0 @@ -pyyaml>=3.0 From b3dc4c4f1f91bbbf87a0739525c052b3d57d37cc Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 26 Mar 2019 14:41:22 -0700 Subject: [PATCH 100/190] mark test as skipped --- testing/test_file_data_source.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/testing/test_file_data_source.py b/testing/test_file_data_source.py index 277c9312..78ab5359 100644 --- a/testing/test_file_data_source.py +++ b/testing/test_file_data_source.py @@ -136,7 +136,7 @@ def test_loads_flags_on_start_from_json(): def test_loads_flags_on_start_from_yaml(): if not have_yaml: - return + pytest.skip("skipping file source test with YAML because pyyaml isn't available") path = make_temp_file(all_properties_yaml) try: source = make_data_source(paths = path) From 803a79466dbf7be881aa98adac596241ce0e23de Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 28 Mar 2019 17:21:44 -0700 Subject: [PATCH 101/190] coerce user attributes into strings when necessary, don't send events without valid users --- ldclient/client.py | 8 +-- ldclient/event_processor.py | 25 +++++++--- ldclient/flag.py | 8 ++- ldclient/util.py | 14 ++++++ testing/test_event_processor.py | 86 ++++++++++++++++++++++++++++++++- testing/test_flag.py | 23 +++++++++ testing/test_ldclient.py | 39 --------------- 7 files changed, 150 insertions(+), 53 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index d1759f6f..edb9f28f 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -174,10 +174,10 @@ def track(self, event_name, user, data=None): :param dict user: the attributes of the user :param data: optional additional data associated with the event """ - self._sanitize_user(user) if user is None or user.get('key') is None: log.warn("Missing user or user key when calling track().") - self._send_event({'kind': 'custom', 'key': event_name, 'user': user, 'data': data}) + else: + self._send_event({'kind': 'custom', 'key': event_name, 'user': user, 'data': data}) def identify(self, user): """Registers the user. 
@@ -188,10 +188,10 @@ def identify(self, user): :param dict user: attributes of the user to register """ - self._sanitize_user(user) if user is None or user.get('key') is None: log.warn("Missing user or user key when calling identify().") - self._send_event({'kind': 'identify', 'key': user.get('key'), 'user': user}) + else: + self._send_event({'kind': 'identify', 'key': str(user.get('key')), 'user': user}) def is_offline(self): """Returns true if the client is in offline mode. diff --git a/ldclient/event_processor.py b/ldclient/event_processor.py index 30619298..b5b0e370 100644 --- a/ldclient/event_processor.py +++ b/ldclient/event_processor.py @@ -29,11 +29,13 @@ from ldclient.util import _headers from ldclient.util import create_http_pool_manager from ldclient.util import log -from ldclient.util import http_error_message, is_http_error_recoverable, throw_if_unsuccessful_response +from ldclient.util import http_error_message, is_http_error_recoverable, stringify_attrs, throw_if_unsuccessful_response __MAX_FLUSH_THREADS__ = 5 __CURRENT_EVENT_SCHEMA__ = 3 +__USER_ATTRS_TO_STRINGIFY_FOR_EVENTS__ = [ "key", "secondary", "ip", "country", "email", "firstName", "lastName", "avatar", "name" ] + class NullEventProcessor(EventProcessor): def __init__(self): @@ -84,9 +86,9 @@ def make_output_event(self, e): 'prereqOf': e.get('prereqOf') } if self._inline_users or is_debug: - out['user'] = self._user_filter.filter_user_props(e['user']) + out['user'] = self._process_user(e) else: - out['userKey'] = e['user'].get('key') + out['userKey'] = self._get_userkey(e) if e.get('reason'): out['reason'] = e.get('reason') return out @@ -94,8 +96,8 @@ def make_output_event(self, e): return { 'kind': 'identify', 'creationDate': e['creationDate'], - 'key': e['user'].get('key'), - 'user': self._user_filter.filter_user_props(e['user']) + 'key': self._get_userkey(e), + 'user': self._process_user(e) } elif kind == 'custom': out = { @@ -105,15 +107,15 @@ def make_output_event(self, e): 'data': e.get('data') } if self._inline_users: - out['user'] = self._user_filter.filter_user_props(e['user']) + out['user'] = self._process_user(e) else: - out['userKey'] = e['user'].get('key') + out['userKey'] = self._get_userkey(e) return out elif kind == 'index': return { 'kind': 'index', 'creationDate': e['creationDate'], - 'user': self._user_filter.filter_user_props(e['user']) + 'user': self._process_user(e) } else: return e @@ -146,6 +148,13 @@ def make_summary_event(self, summary): 'endDate': summary.end_date, 'features': flags_out } + + def _process_user(self, event): + filtered = self._user_filter.filter_user_props(event['user']) + return stringify_attrs(filtered, __USER_ATTRS_TO_STRINGIFY_FOR_EVENTS__) + + def _get_userkey(self, event): + return str(event['user'].get('key')) class EventPayloadSendTask(object): diff --git a/ldclient/flag.py b/ldclient/flag.py index 88739ba0..dceb699c 100644 --- a/ldclient/flag.py +++ b/ldclient/flag.py @@ -10,6 +10,7 @@ import sys from ldclient import operators +from ldclient.util import stringify_attrs from ldclient.versioned_data_kind import FEATURES, SEGMENTS __LONG_SCALE__ = float(0xFFFFFFFFFFFFFFF) @@ -17,6 +18,10 @@ __BUILTINS__ = ["key", "ip", "country", "email", "firstName", "lastName", "avatar", "name", "anonymous"] +__USER_ATTRS_TO_STRINGIFY_FOR_EVALUATION__ = [ "key", "secondary" ] +# Currently we are not stringifying the rest of the built-in attributes prior to evaluation, only for events. +# This is because it could affect evaluation results for existing users (ch35206). 
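# Illustration (hypothetical values): stringify_attrs, the helper added to
# ldclient/util.py below, copies the dict only when a coercion is actually needed:
#     stringify_attrs({'key': 999, 'name': 'x'}, ['key', 'secondary'])
#         -> {'key': '999', 'name': 'x'}   (a new dict; the input is never mutated)
#     stringify_attrs(None, ['key']) -> None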
+ log = logging.getLogger(sys.modules[__name__].__name__) @@ -106,8 +111,9 @@ def error_reason(error_kind): def evaluate(flag, user, store, include_reasons_in_events = False): + sanitized_user = stringify_attrs(user, __USER_ATTRS_TO_STRINGIFY_FOR_EVALUATION__) prereq_events = [] - detail = _evaluate(flag, user, store, prereq_events, include_reasons_in_events) + detail = _evaluate(flag, sanitized_user, store, prereq_events, include_reasons_in_events) return EvalResult(detail = detail, events = prereq_events) def _evaluate(flag, user, store, prereq_events, include_reasons_in_events): diff --git a/ldclient/util.py b/ldclient/util.py index b1d533a2..229030b8 100644 --- a/ldclient/util.py +++ b/ldclient/util.py @@ -5,6 +5,7 @@ import certifi import logging +import six import sys import urllib3 @@ -111,3 +112,16 @@ def http_error_message(status, context, retryable_message = "will retry"): context, retryable_message if is_http_error_recoverable(status) else "giving up permanently" ) + + +def stringify_attrs(attrdict, attrs): + if attrdict is None: + return None + newdict = None + for attr in attrs: + val = attrdict.get(attr) + if val is not None and not isinstance(val, six.string_types): + if newdict is None: + newdict = attrdict.copy() + newdict[attr] = str(val) + return attrdict if newdict is None else newdict diff --git a/testing/test_event_processor.py b/testing/test_event_processor.py index f4ad9ab8..a2e110b2 100644 --- a/testing/test_event_processor.py +++ b/testing/test_event_processor.py @@ -17,6 +17,36 @@ 'key': 'userkey', 'privateAttrs': [ 'name' ] } +numeric_user = { + 'key': 1, + 'secondary': 2, + 'ip': 3, + 'country': 4, + 'email': 5, + 'firstName': 6, + 'lastName': 7, + 'avatar': 8, + 'name': 9, + 'anonymous': False, + 'custom': { + 'age': 99 + } +} +stringified_numeric_user = { + 'key': '1', + 'secondary': '2', + 'ip': '3', + 'country': '4', + 'email': '5', + 'firstName': '6', + 'lastName': '7', + 'avatar': '8', + 'name': '9', + 'anonymous': False, + 'custom': { + 'age': 99 + } +} ep = None mock_http = None @@ -65,6 +95,21 @@ def test_user_is_filtered_in_identify_event(): 'user': filtered_user }] +def test_user_attrs_are_stringified_in_identify_event(): + setup_processor(Config()) + + e = { 'kind': 'identify', 'user': numeric_user } + ep.send_event(e) + + output = flush_and_get_events() + assert len(output) == 1 + assert output == [{ + 'kind': 'identify', + 'creationDate': e['creationDate'], + 'key': stringified_numeric_user['key'], + 'user': stringified_numeric_user + }] + def test_individual_feature_event_is_queued_with_index_event(): setup_processor(Config()) @@ -95,6 +140,21 @@ def test_user_is_filtered_in_index_event(): check_feature_event(output[1], e, False, None) check_summary_event(output[2]) +def test_user_attrs_are_stringified_in_index_event(): + setup_processor(Config()) + + e = { + 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': numeric_user, + 'variation': 1, 'value': 'value', 'default': 'default', 'trackEvents': True + } + ep.send_event(e) + + output = flush_and_get_events() + assert len(output) == 3 + check_index_event(output[0], e, stringified_numeric_user) + check_feature_event(output[1], e, False, None) + check_summary_event(output[2]) + def test_feature_event_can_contain_inline_user(): setup_processor(Config(inline_users_in_events = True)) @@ -123,6 +183,20 @@ def test_user_is_filtered_in_feature_event(): check_feature_event(output[0], e, False, filtered_user) check_summary_event(output[1]) +def test_user_attrs_are_stringified_in_feature_event(): + 
setup_processor(Config(inline_users_in_events = True)) + + e = { + 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': numeric_user, + 'variation': 1, 'value': 'value', 'default': 'default', 'trackEvents': True + } + ep.send_event(e) + + output = flush_and_get_events() + assert len(output) == 2 + check_feature_event(output[0], e, False, stringified_numeric_user) + check_summary_event(output[1]) + def test_index_event_is_still_generated_if_inline_users_is_true_but_feature_event_is_not_tracked(): setup_processor(Config(inline_users_in_events = True)) @@ -346,6 +420,16 @@ def test_user_is_filtered_in_custom_event(): assert len(output) == 1 check_custom_event(output[0], e, filtered_user) +def test_user_attrs_are_stringified_in_custom_event(): + setup_processor(Config(inline_users_in_events = True)) + + e = { 'kind': 'custom', 'key': 'eventkey', 'user': numeric_user, 'data': { 'thing': 'stuff '} } + ep.send_event(e) + + output = flush_and_get_events() + assert len(output) == 1 + check_custom_event(output[0], e, stringified_numeric_user) + def test_nothing_is_sent_if_there_are_no_events(): setup_processor(Config()) ep.flush() @@ -426,7 +510,7 @@ def check_feature_event(data, source, debug, inline_user): assert data.get('value') == source.get('value') assert data.get('default') == source.get('default') if inline_user is None: - assert data['userKey'] == source['user']['key'] + assert data['userKey'] == str(source['user']['key']) else: assert data['user'] == inline_user diff --git a/testing/test_flag.py b/testing/test_flag.py index 97f64af0..9ca4b05a 100644 --- a/testing/test_flag.py +++ b/testing/test_flag.py @@ -248,6 +248,29 @@ def test_flag_returns_error_if_rule_has_rollout_with_no_variations(): detail = EvaluationDetail(None, None, {'kind': 'ERROR', 'errorKind': 'MALFORMED_FLAG'}) assert evaluate(flag, user, empty_store) == EvalResult(detail, []) +def test_user_key_is_coerced_to_string_for_evaluation(): + clause = { 'attribute': 'key', 'op': 'in', 'values': [ '999' ] } + flag = _make_bool_flag_from_clause(clause) + user = { 'key': 999 } + assert evaluate(flag, user, empty_store).detail.value == True + +def test_secondary_key_is_coerced_to_string_for_evaluation(): + # We can't really verify that the rollout calculation works correctly, but we can at least + # make sure it doesn't error out if there's a non-string secondary value (ch35189) + rule = { + 'id': 'ruleid', + 'clauses': [ + { 'attribute': 'key', 'op': 'in', 'values': [ 'userkey' ] } + ], + 'rollout': { + 'salt': '', + 'variations': [ { 'weight': 100000, 'variation': 1 } ] + } + } + flag = make_boolean_flag_with_rules([rule]) + user = { 'key': 'userkey', 'secondary': 999 } + assert evaluate(flag, user, empty_store).detail.value == True + def test_segment_match_clause_retrieves_segment_from_store(): store = InMemoryFeatureStore() segment = { diff --git a/testing/test_ldclient.py b/testing/test_ldclient.py index 0e6c33a2..90bdeb4c 100644 --- a/testing/test_ldclient.py +++ b/testing/test_ldclient.py @@ -29,25 +29,6 @@ } } -numeric_key_user = {} - -sanitized_numeric_key_user = { - u'key': '33', - u'custom': { - u'bizzle': u'def' - } -} - - -def setup_function(function): - global numeric_key_user - numeric_key_user = { - u'key': 33, - u'custom': { - u'bizzle': u'def' - } - } - def make_client(store): return LDClient(config=Config(sdk_key = 'SDK_KEY', @@ -90,11 +71,6 @@ def test_toggle_offline(): assert offline_client.variation('feature.key', user, default=None) is None -def test_sanitize_user(): - 
client._sanitize_user(numeric_key_user) - assert numeric_key_user == sanitized_numeric_key_user - - def test_identify(): client.identify(user) @@ -102,13 +78,6 @@ def test_identify(): assert e['kind'] == 'identify' and e['key'] == u'xyz' and e['user'] == user -def test_identify_numeric_key_user(): - client.identify(numeric_key_user) - - e = get_first_event(client) - assert e['kind'] == 'identify' and e['key'] == '33' and e['user'] == sanitized_numeric_key_user - - def test_track(): client.track('my_event', user, 42) @@ -116,14 +85,6 @@ def test_track(): assert e['kind'] == 'custom' and e['key'] == 'my_event' and e['user'] == user and e['data'] == 42 -def test_track_numeric_key_user(): - client.track('my_event', numeric_key_user, 42) - - e = get_first_event(client) - assert e['kind'] == 'custom' and e['key'] == 'my_event' and e['user'] == sanitized_numeric_key_user \ - and e['data'] == 42 - - def test_defaults(): my_client = LDClient(config=Config(base_uri="http://localhost:3000", defaults={"foo": "bar"}, From b7035a567c42f5d25d8cfd4f660fb533fbedd805 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 28 Mar 2019 17:50:35 -0700 Subject: [PATCH 102/190] more unit tests --- testing/test_ldclient.py | 30 +++++++++++++++++++++++++++++- 1 file changed, 29 insertions(+), 1 deletion(-) diff --git a/testing/test_ldclient.py b/testing/test_ldclient.py index 90bdeb4c..12746857 100644 --- a/testing/test_ldclient.py +++ b/testing/test_ldclient.py @@ -50,7 +50,15 @@ def make_off_flag_with_value(key, value): def get_first_event(c): - return c._event_processor._events.pop(0) + e = c._event_processor._events.pop(0) + c._event_processor._events = [] + return e + + +def count_events(c): + n = len(c._event_processor._events) + c._event_processor._events = [] + return n def test_ctor_both_sdk_keys_set(): @@ -78,6 +86,16 @@ def test_identify(): assert e['kind'] == 'identify' and e['key'] == u'xyz' and e['user'] == user +def test_identify_no_user(): + client.identify(None) + assert count_events(client) == 0 + + +def test_identify_no_user_key(): + client.identify({ 'name': 'nokey' }) + assert count_events(client) == 0 + + def test_track(): client.track('my_event', user, 42) @@ -85,6 +103,16 @@ def test_track(): assert e['kind'] == 'custom' and e['key'] == 'my_event' and e['user'] == user and e['data'] == 42 +def test_track_no_user(): + client.track('my_event', None) + assert count_events(client) == 0 + + +def test_track_no_user_key(): + client.track('my_event', { 'name': 'nokey' }) + assert count_events(client) == 0 + + def test_defaults(): my_client = LDClient(config=Config(base_uri="http://localhost:3000", defaults={"foo": "bar"}, From 44101b236b756dd32257ffca7f31a637235a4a99 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 28 Mar 2019 18:07:36 -0700 Subject: [PATCH 103/190] remove redundant sanitize step --- ldclient/client.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index edb9f28f..f0b973a2 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -266,9 +266,6 @@ def _evaluate_internal(self, key, user, default, include_reasons_in_events): if self._config.offline: return EvaluationDetail(default, None, error_reason('CLIENT_NOT_READY')) - if user is not None: - self._sanitize_user(user) - def send_event(value, variation=None, flag=None, reason=None): self._send_event({'kind': 'feature', 'key': key, 'user': user, 'value': value, 'variation': variation, 'default': default, @@ -423,10 +420,5 @@ def secure_mode_hash(self, user): return "" return 
hmac.new(self._config.sdk_key.encode(), user.get('key').encode(), hashlib.sha256).hexdigest() - @staticmethod - def _sanitize_user(user): - if 'key' in user: - user['key'] = str(user['key']) - __all__ = ['LDClient', 'Config'] From ddfb3c2a910878d8d36fa5bb6b11cd197c0a7bc5 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 9 Apr 2019 11:57:17 -0700 Subject: [PATCH 104/190] ensure that client components are cleaned up correctly in every configuration --- ldclient/client.py | 81 ++++++------ ldclient/event_processor.py | 20 --- ldclient/impl/stubs.py | 39 ++++++ testing/test_ldclient.py | 246 ++++++++++++++++++++++-------------- 4 files changed, 232 insertions(+), 154 deletions(-) create mode 100644 ldclient/impl/stubs.py diff --git a/ldclient/client.py b/ldclient/client.py index f0b973a2..a16cce12 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -8,11 +8,11 @@ import traceback from ldclient.config import Config as Config -from ldclient.event_processor import NullEventProcessor from ldclient.feature_requester import FeatureRequesterImpl from ldclient.feature_store import _FeatureStoreDataSetSorter from ldclient.flag import EvaluationDetail, evaluate, error_reason from ldclient.flags_state import FeatureFlagsState +from ldclient.impl.stubs import NullEventProcessor, NullUpdateProcessor from ldclient.interfaces import FeatureStore from ldclient.polling import PollingUpdateProcessor from ldclient.streaming import StreamingUpdateProcessor @@ -94,45 +94,21 @@ def __init__(self, sdk_key=None, config=None, start_wait=5): self._store = _FeatureStoreClientWrapper(self._config.feature_store) """ :type: FeatureStore """ - if self._config.offline or not self._config.send_events: - self._event_processor = NullEventProcessor() - else: - self._event_processor = self._config.event_processor_class(self._config) - if self._config.offline: log.info("Started LaunchDarkly Client in offline mode") - return if self._config.use_ldd: log.info("Started LaunchDarkly Client in LDD mode") - return - update_processor_ready = threading.Event() - - if self._config.update_processor_class: - log.info("Using user-specified update processor: " + str(self._config.update_processor_class)) - self._update_processor = self._config.update_processor_class( - self._config, self._store, update_processor_ready) - else: - if self._config.feature_requester_class: - feature_requester = self._config.feature_requester_class(self._config) - else: - feature_requester = FeatureRequesterImpl(self._config) - """ :type: FeatureRequester """ - - if self._config.stream: - self._update_processor = StreamingUpdateProcessor( - self._config, feature_requester, self._store, update_processor_ready) - else: - log.info("Disabling streaming API") - log.warn("You should only disable the streaming API if instructed to do so by LaunchDarkly support") - self._update_processor = PollingUpdateProcessor( - self._config, feature_requester, self._store, update_processor_ready) - """ :type: UpdateProcessor """ + self._event_processor = self._make_event_processor(self._config) + update_processor_ready = threading.Event() + self._update_processor = self._make_update_processor(self._config, self._store, update_processor_ready) self._update_processor.start() - log.info("Waiting up to " + str(start_wait) + " seconds for LaunchDarkly client to initialize...") - update_processor_ready.wait(start_wait) + + if start_wait > 0 and not self._config.offline and not self._config.use_ldd: + log.info("Waiting up to " + str(start_wait) + " seconds for LaunchDarkly client to
initialize...") + update_processor_ready.wait(start_wait) if self._update_processor.initialized() is True: log.info("Started LaunchDarkly Client: OK") @@ -140,6 +116,32 @@ def __init__(self, sdk_key=None, config=None, start_wait=5): log.warn("Initialization timeout exceeded for LaunchDarkly Client or an error occurred. " "Feature Flags may not yet be available.") + def _make_event_processor(self, config): + if config.offline or not config.send_events: + return NullEventProcessor() + return config.event_processor_class(config) + + def _make_update_processor(self, config, store, ready): + if config.update_processor_class: + log.info("Using user-specified update processor: " + str(config.update_processor_class)) + return self._config.update_processor_class(config, store, ready) + + if config.offline or config.use_ldd: + return NullUpdateProcessor(config, store, ready) + + if config.feature_requester_class: + feature_requester = config.feature_requester_class(config) + else: + feature_requester = FeatureRequesterImpl(config) + """ :type: FeatureRequester """ + + if config.stream: + return StreamingUpdateProcessor(config, feature_requester, store, ready) + + log.info("Disabling streaming API") + log.warn("You should only disable the streaming API if instructed to do so by LaunchDarkly support") + return PollingUpdateProcessor(config, feature_requester, store, ready) + def get_sdk_key(self): """Returns the configured SDK key. @@ -153,13 +155,16 @@ def close(self): Do not attempt to use the client after calling this method. """ log.info("Closing LaunchDarkly client..") - if self.is_offline(): - return - if self._event_processor: - self._event_processor.stop() - if self._update_processor and self._update_processor.is_alive(): - self._update_processor.stop() + self._event_processor.stop() + self._update_processor.stop() + # These magic methods allow a client object to be automatically cleaned up by the "with" scope operator + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + self.close() + def _send_event(self, event): self._event_processor.send_event(event) diff --git a/ldclient/event_processor.py b/ldclient/event_processor.py index b5b0e370..2bd4f322 100644 --- a/ldclient/event_processor.py +++ b/ldclient/event_processor.py @@ -37,26 +37,6 @@ __USER_ATTRS_TO_STRINGIFY_FOR_EVENTS__ = [ "key", "secondary", "ip", "country", "email", "firstName", "lastName", "avatar", "name" ] -class NullEventProcessor(EventProcessor): - def __init__(self): - pass - - def start(self): - pass - - def stop(self): - pass - - def is_alive(self): - return False - - def send_event(self, event): - pass - - def flush(self): - pass - - EventProcessorMessage = namedtuple('EventProcessorMessage', ['type', 'param']) diff --git a/ldclient/impl/stubs.py b/ldclient/impl/stubs.py new file mode 100644 index 00000000..30d0eea8 --- /dev/null +++ b/ldclient/impl/stubs.py @@ -0,0 +1,39 @@ + +from ldclient.interfaces import EventProcessor, UpdateProcessor + + +class NullEventProcessor(EventProcessor): + def __init__(self): + pass + + def start(self): + pass + + def stop(self): + pass + + def is_alive(self): + return False + + def send_event(self, event): + pass + + def flush(self): + pass + + +class NullUpdateProcessor(UpdateProcessor): + def __init__(self, config, store, ready): + self._ready = ready + + def start(self): + self._ready.set() + + def stop(self): + pass + + def is_alive(self): + return False + + def initialized(self): + return True diff --git a/testing/test_ldclient.py 
b/testing/test_ldclient.py index 12746857..1c19fe16 100644 --- a/testing/test_ldclient.py +++ b/testing/test_ldclient.py @@ -1,8 +1,13 @@ from ldclient.client import LDClient, Config -from ldclient.event_processor import NullEventProcessor +from ldclient.event_processor import DefaultEventProcessor from ldclient.feature_store import InMemoryFeatureStore +from ldclient.impl.stubs import NullEventProcessor, NullUpdateProcessor from ldclient.interfaces import UpdateProcessor +from ldclient.polling import PollingUpdateProcessor +from ldclient.streaming import StreamingUpdateProcessor from ldclient.versioned_data_kind import FEATURES, SEGMENTS + +import logging import pytest from testing.stub_util import CapturingFeatureStore, MockEventProcessor, MockUpdateProcessor from testing.sync_util import wait_until @@ -13,14 +18,8 @@ import Queue as queue -client = LDClient(config=Config(base_uri="http://localhost:3000", - event_processor_class = MockEventProcessor, update_processor_class = MockUpdateProcessor)) -offline_client = LDClient(config= - Config(sdk_key="secret", base_uri="http://localhost:3000", - offline=True)) -no_send_events_client = LDClient(config= - Config(sdk_key="secret", base_uri="http://localhost:3000", - update_processor_class = MockUpdateProcessor, send_events=False)) +unreachable_uri="http://fake" + user = { u'key': u'xyz', @@ -30,14 +29,32 @@ } -def make_client(store): +def make_client(store = InMemoryFeatureStore()): return LDClient(config=Config(sdk_key = 'SDK_KEY', - base_uri="http://localhost:3000", + base_uri=unreachable_uri, + events_uri=unreachable_uri, + stream_uri=unreachable_uri, event_processor_class=MockEventProcessor, update_processor_class=MockUpdateProcessor, feature_store=store)) +def make_offline_client(): + return LDClient(config=Config(sdk_key="secret", + offline=True, + base_uri=unreachable_uri, + events_uri=unreachable_uri, + stream_uri=unreachable_uri)) + + +def make_ldd_client(): + return LDClient(config=Config(sdk_key="secret", + use_ldd=True, + base_uri=unreachable_uri, + events_uri=unreachable_uri, + stream_uri=unreachable_uri)) + + def make_off_flag_with_value(key, value): return { u'key': key, @@ -68,56 +85,91 @@ def test_ctor_both_sdk_keys_set(): def test_client_has_null_event_processor_if_offline(): - assert isinstance(offline_client._event_processor, NullEventProcessor) + with make_offline_client() as client: + assert isinstance(client._event_processor, NullEventProcessor) def test_client_has_null_event_processor_if_send_events_off(): - assert isinstance(no_send_events_client._event_processor, NullEventProcessor) + config = Config(sdk_key="secret", base_uri=unreachable_uri, + update_processor_class = MockUpdateProcessor, send_events=False) + with LDClient(config=config) as client: + assert isinstance(client._event_processor, NullEventProcessor) + + +def test_client_has_normal_event_processor_in_ldd_mode(): + with make_ldd_client() as client: + assert isinstance(client._event_processor, DefaultEventProcessor) + + +def test_client_has_null_update_processor_in_offline_mode(): + with make_offline_client() as client: + assert isinstance(client._update_processor, NullUpdateProcessor) + + +def test_client_has_null_update_processor_in_ldd_mode(): + with make_ldd_client() as client: + assert isinstance(client._update_processor, NullUpdateProcessor) + + +def test_client_has_streaming_processor_by_default(): + config = Config(sdk_key="secret", base_uri=unreachable_uri, stream_uri=unreachable_uri, send_events=False) + with LDClient(config=config, start_wait=0) 
as client: + assert isinstance(client._update_processor, StreamingUpdateProcessor) + + +def test_client_has_polling_processor_if_streaming_is_disabled(): + config = Config(sdk_key="secret", stream=False, base_uri=unreachable_uri, stream_uri=unreachable_uri, send_events=False) + with LDClient(config=config, start_wait=0) as client: + assert isinstance(client._update_processor, PollingUpdateProcessor) def test_toggle_offline(): - assert offline_client.variation('feature.key', user, default=None) is None + with make_offline_client() as client: + assert client.variation('feature.key', user, default=None) is None def test_identify(): - client.identify(user) - - e = get_first_event(client) - assert e['kind'] == 'identify' and e['key'] == u'xyz' and e['user'] == user + with make_client() as client: + client.identify(user) + e = get_first_event(client) + assert e['kind'] == 'identify' and e['key'] == u'xyz' and e['user'] == user def test_identify_no_user(): - client.identify(None) - assert count_events(client) == 0 + with make_client() as client: + client.identify(None) + assert count_events(client) == 0 def test_identify_no_user_key(): - client.identify({ 'name': 'nokey' }) - assert count_events(client) == 0 + with make_client() as client: + client.identify({ 'name': 'nokey' }) + assert count_events(client) == 0 def test_track(): - client.track('my_event', user, 42) - - e = get_first_event(client) - assert e['kind'] == 'custom' and e['key'] == 'my_event' and e['user'] == user and e['data'] == 42 + with make_client() as client: + client.track('my_event', user, 42) + e = get_first_event(client) + assert e['kind'] == 'custom' and e['key'] == 'my_event' and e['user'] == user and e['data'] == 42 def test_track_no_user(): - client.track('my_event', None) - assert count_events(client) == 0 + with make_client() as client: + client.track('my_event', None) + assert count_events(client) == 0 def test_track_no_user_key(): - client.track('my_event', { 'name': 'nokey' }) - assert count_events(client) == 0 + with make_client() as client: + client.track('my_event', { 'name': 'nokey' }) + assert count_events(client) == 0 def test_defaults(): - my_client = LDClient(config=Config(base_uri="http://localhost:3000", - defaults={"foo": "bar"}, - offline=True)) - assert "bar" == my_client.variation('foo', user, default=None) + config=Config(base_uri="http://localhost:3000", defaults={"foo": "bar"}, offline=True) + with LDClient(config=config) as client: + assert "bar" == client.variation('foo', user, default=None) def test_defaults_and_online(): @@ -144,7 +196,8 @@ def test_defaults_and_online_no_default(): def test_no_defaults(): - assert "bar" == offline_client.variation('foo', user, default="bar") + with make_offline_client() as client: + assert "bar" == client.variation('foo', user, default="bar") def test_event_for_existing_feature(): @@ -153,19 +206,19 @@ def test_event_for_existing_feature(): feature['debugEventsUntilDate'] = 1000 store = InMemoryFeatureStore() store.init({FEATURES: {'feature.key': feature}}) - client = make_client(store) - assert 'value' == client.variation('feature.key', user, default='default') - e = get_first_event(client) - assert (e['kind'] == 'feature' and - e['key'] == 'feature.key' and - e['user'] == user and - e['version'] == feature['version'] and - e['value'] == 'value' and - e['variation'] == 0 and - e.get('reason') is None and - e['default'] == 'default' and - e['trackEvents'] == True and - e['debugEventsUntilDate'] == 1000) + with make_client(store) as client: + assert 'value' == 
client.variation('feature.key', user, default='default') + e = get_first_event(client) + assert (e['kind'] == 'feature' and + e['key'] == 'feature.key' and + e['user'] == user and + e['version'] == feature['version'] and + e['value'] == 'value' and + e['variation'] == 0 and + e.get('reason') is None and + e['default'] == 'default' and + e['trackEvents'] == True and + e['debugEventsUntilDate'] == 1000) def test_event_for_existing_feature_with_reason(): @@ -174,33 +227,33 @@ def test_event_for_existing_feature_with_reason(): feature['debugEventsUntilDate'] = 1000 store = InMemoryFeatureStore() store.init({FEATURES: {'feature.key': feature}}) - client = make_client(store) - assert 'value' == client.variation_detail('feature.key', user, default='default').value - e = get_first_event(client) - assert (e['kind'] == 'feature' and - e['key'] == 'feature.key' and - e['user'] == user and - e['version'] == feature['version'] and - e['value'] == 'value' and - e['variation'] == 0 and - e['reason'] == {'kind': 'OFF'} and - e['default'] == 'default' and - e['trackEvents'] == True and - e['debugEventsUntilDate'] == 1000) + with make_client(store) as client: + assert 'value' == client.variation_detail('feature.key', user, default='default').value + e = get_first_event(client) + assert (e['kind'] == 'feature' and + e['key'] == 'feature.key' and + e['user'] == user and + e['version'] == feature['version'] and + e['value'] == 'value' and + e['variation'] == 0 and + e['reason'] == {'kind': 'OFF'} and + e['default'] == 'default' and + e['trackEvents'] == True and + e['debugEventsUntilDate'] == 1000) def test_event_for_unknown_feature(): store = InMemoryFeatureStore() store.init({FEATURES: {}}) - client = make_client(store) - assert 'default' == client.variation('feature.key', user, default='default') - e = get_first_event(client) - assert (e['kind'] == 'feature' and - e['key'] == 'feature.key' and - e['user'] == user and - e['value'] == 'default' and - e['variation'] == None and - e['default'] == 'default') + with make_client(store) as client: + assert 'default' == client.variation('feature.key', user, default='default') + e = get_first_event(client) + assert (e['kind'] == 'feature' and + e['key'] == 'feature.key' and + e['user'] == user and + e['value'] == 'default' and + e['variation'] == None and + e['default'] == 'default') def test_event_for_existing_feature_with_no_user(): @@ -209,18 +262,18 @@ def test_event_for_existing_feature_with_no_user(): feature['debugEventsUntilDate'] = 1000 store = InMemoryFeatureStore() store.init({FEATURES: {'feature.key': feature}}) - client = make_client(store) - assert 'default' == client.variation('feature.key', None, default='default') - e = get_first_event(client) - assert (e['kind'] == 'feature' and - e['key'] == 'feature.key' and - e['user'] == None and - e['version'] == feature['version'] and - e['value'] == 'default' and - e['variation'] == None and - e['default'] == 'default' and - e['trackEvents'] == True and - e['debugEventsUntilDate'] == 1000) + with make_client(store) as client: + assert 'default' == client.variation('feature.key', None, default='default') + e = get_first_event(client) + assert (e['kind'] == 'feature' and + e['key'] == 'feature.key' and + e['user'] == None and + e['version'] == feature['version'] and + e['value'] == 'default' and + e['variation'] == None and + e['default'] == 'default' and + e['trackEvents'] == True and + e['debugEventsUntilDate'] == 1000) def test_event_for_existing_feature_with_no_user_key(): @@ -229,24 +282,25 @@ def 
test_event_for_existing_feature_with_no_user_key(): feature['debugEventsUntilDate'] = 1000 store = InMemoryFeatureStore() store.init({FEATURES: {'feature.key': feature}}) - client = make_client(store) - bad_user = { u'name': u'Bob' } - assert 'default' == client.variation('feature.key', bad_user, default='default') - e = get_first_event(client) - assert (e['kind'] == 'feature' and - e['key'] == 'feature.key' and - e['user'] == bad_user and - e['version'] == feature['version'] and - e['value'] == 'default' and - e['variation'] == None and - e['default'] == 'default' and - e['trackEvents'] == True and - e['debugEventsUntilDate'] == 1000) + with make_client(store) as client: + bad_user = { u'name': u'Bob' } + assert 'default' == client.variation('feature.key', bad_user, default='default') + e = get_first_event(client) + assert (e['kind'] == 'feature' and + e['key'] == 'feature.key' and + e['user'] == bad_user and + e['version'] == feature['version'] and + e['value'] == 'default' and + e['variation'] == None and + e['default'] == 'default' and + e['trackEvents'] == True and + e['debugEventsUntilDate'] == 1000) def test_secure_mode_hash(): user = {'key': 'Message'} - assert offline_client.secure_mode_hash(user) == "aa747c502a898200f9e4fa21bac68136f886a0e27aec70ba06daf2e2a5cb5597" + with make_offline_client() as client: + assert client.secure_mode_hash(user) == "aa747c502a898200f9e4fa21bac68136f886a0e27aec70ba06daf2e2a5cb5597" dependency_ordering_test_data = { From 758568447c52d563f855b25f5fe0830fd12f264c Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 9 Apr 2019 16:24:20 -0700 Subject: [PATCH 105/190] miscellaneous test fixes --- ldclient/client.py | 2 +- testing/test_ldclient.py | 2 ++ testing/test_ldclient_evaluation.py | 2 +- 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index a16cce12..16d91b0a 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -124,7 +124,7 @@ def _make_event_processor(self, config): def _make_update_processor(self, config, store, ready): if config.update_processor_class: log.info("Using user-specified update processor: " + str(config.update_processor_class)) - return self._config.update_processor_class(config, store, ready) + return config.update_processor_class(config, store, ready) if config.offline or config.use_ldd: return NullUpdateProcessor(config, store, ready) diff --git a/testing/test_ldclient.py b/testing/test_ldclient.py index 1c19fe16..e1ee3910 100644 --- a/testing/test_ldclient.py +++ b/testing/test_ldclient.py @@ -111,12 +111,14 @@ def test_client_has_null_update_processor_in_ldd_mode(): assert isinstance(client._update_processor, NullUpdateProcessor) +@pytest.mark.skip("Can't currently use a live stream processor in tests because its error logging will disrupt other tests.") def test_client_has_streaming_processor_by_default(): config = Config(sdk_key="secret", base_uri=unreachable_uri, stream_uri=unreachable_uri, send_events=False) with LDClient(config=config, start_wait=0) as client: assert isinstance(client._update_processor, StreamingUpdateProcessor) +@pytest.mark.skip("Can't currently use a live polling processor in tests because its error logging will disrupt other tests.") def test_client_has_polling_processor_if_streaming_is_disabled(): config = Config(sdk_key="secret", stream=False, base_uri=unreachable_uri, stream_uri=unreachable_uri, send_events=False) with LDClient(config=config, start_wait=0) as client: diff --git a/testing/test_ldclient_evaluation.py 
b/testing/test_ldclient_evaluation.py index be925a5c..f716c5de 100644 --- a/testing/test_ldclient_evaluation.py +++ b/testing/test_ldclient_evaluation.py @@ -123,7 +123,7 @@ def test_variation_detail_when_user_is_none(): expected = EvaluationDetail('default', None, {'kind': 'ERROR', 'errorKind': 'USER_NOT_SPECIFIED'}) assert expected == client.variation_detail('feature.key', None, default='default') -def test_variation_when_user_has_no_key(): +def test_variation_detail_when_user_has_no_key(): feature = make_off_flag_with_value('feature.key', 'value') store = InMemoryFeatureStore() store.init({FEATURES: {'feature.key': feature}}) From 3b16ebf1b8938c5a0a798d792d1845e4e46642c6 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 13 Apr 2019 16:40:52 -0700 Subject: [PATCH 106/190] support metric value with track() --- ldclient/client.py | 5 +++-- ldclient/event_processor.py | 7 +++++-- ldclient/impl/event_factory.py | 12 ++++++++---- testing/test_event_processor.py | 3 ++- testing/test_ldclient.py | 16 +++++++++++++++- 5 files changed, 33 insertions(+), 10 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index 1d816d2d..ce17f5e4 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -171,7 +171,7 @@ def __exit__(self, type, value, traceback): def _send_event(self, event): self._event_processor.send_event(event) - def track(self, event_name, user, data=None): + def track(self, event_name, user, data=None, metric_value=None): """Tracks that a user performed an event. LaunchDarkly automatically tracks pageviews and clicks that are specified in the Goals @@ -181,11 +181,12 @@ def track(self, event_name, user, data=None): :param string event_name: the name of the event, which may correspond to a goal in A/B tests :param dict user: the attributes of the user :param data: optional additional data associated with the event + :param metric_value: optional numeric value that can be used in analytics """ if user is None or user.get('key') is None: log.warn("Missing user or user key when calling track().") else: - self._send_event(self._event_factory_default.new_custom_event(event_name, user, data)) + self._send_event(self._event_factory_default.new_custom_event(event_name, user, data, metric_value)) def identify(self, user): """Registers the user. 
diff --git a/ldclient/event_processor.py b/ldclient/event_processor.py index 2bd4f322..d7f96af5 100644 --- a/ldclient/event_processor.py +++ b/ldclient/event_processor.py @@ -83,13 +83,16 @@ def make_output_event(self, e): out = { 'kind': 'custom', 'creationDate': e['creationDate'], - 'key': e['key'], - 'data': e.get('data') + 'key': e['key'] } if self._inline_users: out['user'] = self._process_user(e) else: out['userKey'] = self._get_userkey(e) + if e.get('data') is not None: + out['data'] = e['data'] + if e.get('metricValue') is not None: + out['metricValue'] = e['metricValue'] return out elif kind == 'index': return { diff --git a/ldclient/impl/event_factory.py b/ldclient/impl/event_factory.py index b3c559f2..d2a62ad8 100644 --- a/ldclient/impl/event_factory.py +++ b/ldclient/impl/event_factory.py @@ -69,13 +69,17 @@ def new_identify_event(self, user): 'user': user } - def new_custom_event(self, event_name, user, data): - return { + def new_custom_event(self, event_name, user, data, metric_value): + e = { 'kind': 'custom', 'key': event_name, - 'user': user, - 'data': data + 'user': user } + if data is not None: + e['data'] = data + if metric_value is not None: + e['metricValue'] = metric_value + return e def _is_experiment(self, flag, reason): if reason is not None: diff --git a/testing/test_event_processor.py b/testing/test_event_processor.py index a2e110b2..dfb4983f 100644 --- a/testing/test_event_processor.py +++ b/testing/test_event_processor.py @@ -392,7 +392,7 @@ def test_nontracked_events_are_summarized(): def test_custom_event_is_queued_with_user(): setup_processor(Config()) - e = { 'kind': 'custom', 'key': 'eventkey', 'user': user, 'data': { 'thing': 'stuff '} } + e = { 'kind': 'custom', 'key': 'eventkey', 'user': user, 'data': { 'thing': 'stuff '}, 'metricValue': 1.5 } ep.send_event(e) output = flush_and_get_events() @@ -523,6 +523,7 @@ def check_custom_event(data, source, inline_user): assert data['userKey'] == source['user']['key'] else: assert data['user'] == inline_user + assert data.get('metricValue') == source.get('metricValue') def check_summary_event(data): assert data['kind'] == 'summary' diff --git a/testing/test_ldclient.py b/testing/test_ldclient.py index d7177bcc..a6789e4d 100644 --- a/testing/test_ldclient.py +++ b/testing/test_ldclient.py @@ -150,10 +150,24 @@ def test_identify_no_user_key(): def test_track(): + with make_client() as client: + client.track('my_event', user) + e = get_first_event(client) + assert e['kind'] == 'custom' and e['key'] == 'my_event' and e['user'] == user and e.get('data') is None and e.get('metricValue') is None + + +def test_track_with_data(): with make_client() as client: client.track('my_event', user, 42) e = get_first_event(client) - assert e['kind'] == 'custom' and e['key'] == 'my_event' and e['user'] == user and e['data'] == 42 + assert e['kind'] == 'custom' and e['key'] == 'my_event' and e['user'] == user and e['data'] == 42 and e.get('metricValue') is None + + +def test_track_with_metric_value(): + with make_client() as client: + client.track('my_event', user, 42, 1.5) + e = get_first_event(client) + assert e['kind'] == 'custom' and e['key'] == 'my_event' and e['user'] == user and e['data'] == 42 and e.get('metricValue') == 1.5 def test_track_no_user(): From 2f6961df61a14542d8a973ab5a89f7343acf4cab Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 16 Apr 2019 18:39:04 -0700 Subject: [PATCH 107/190] update method description --- ldclient/client.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git 
a/ldclient/client.py b/ldclient/client.py index ce17f5e4..6e74ea3b 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -181,7 +181,9 @@ def track(self, event_name, user, data=None, metric_value=None): :param string event_name: the name of the event, which may correspond to a goal in A/B tests :param dict user: the attributes of the user :param data: optional additional data associated with the event - :param metric_value: optional numeric value that can be used in analytics + :param metric_value: a numeric value used by the LaunchDarkly experimentation feature in + numeric custom metrics. Can be omitted if this event is used by only non-numeric metrics. + This field will also be returned as part of the custom event for Data Export. """ if user is None or user.get('key') is None: log.warn("Missing user or user key when calling track().") From 902be0206056c64c6e4285863171d735f8f21413 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 25 Apr 2019 20:15:29 -0700 Subject: [PATCH 108/190] update readme format and repo links --- CONTRIBUTING.md | 52 +++++++++++------- README.md | 129 ++++++++------------------------------------- scripts/release.sh | 4 +- setup.py | 2 +- 4 files changed, 58 insertions(+), 129 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index af5083c2..91c39924 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,33 +1,47 @@ -Contributing ------------- +# Contributing to the LaunchDarkly Server-side SDK for Python -We encourage pull-requests and other contributions from the community. We've also published an [SDK contributor's guide](http://docs.launchdarkly.com/docs/sdk-contributors-guide) that provides a detailed explanation of how our SDKs work. +LaunchDarkly has published an [SDK contributor's guide](https://docs.launchdarkly.com/docs/sdk-contributors-guide) that provides a detailed explanation of how our SDKs work. See below for additional information on how to contribute to this SDK. -Development information (for developing this module itself) ------------------------------------------------------------ +## Submitting bug reports and feature requests + +The LaunchDarkly SDK team monitors the issue tracker associated with the `launchdarkly/python-server-sdk` SDK repository. Bug reports and feature requests specific to this SDK should be filed in this issue tracker. The SDK team will respond to all newly filed issues within two business days. -1. One-time setup: +## Submitting pull requests + +We encourage pull requests and other contributions from the community. Before submitting pull requests, ensure that all temporary or unintended code is removed. Don't worry about adding reviewers to the pull request; the LaunchDarkly SDK team will add themselves. The SDK team will acknowledge all pull requests within two business days. - mkvirtualenv python-client +## Build instructions -1. When working on the project be sure to activate the python-client virtualenv using the technique of your choosing. +### Setup -1. Install requirements (run-time & test): +It's advisable to use `virtualenv` to create a development environment within the project directory: - pip install -r requirements.txt - pip install -r test-requirements.txt +``` +mkvirtualenv python-client +source ./python-client/bin/activate +``` -1. When running unit tests, in order for `test_feature_store.py` to run, you'll need all of the supported databases (Redis, Consul, DynamoDB) running locally on their default ports. +To install the runtime and test requirements: -1. 
If you want integration tests to run, set the ```LD_SDK_KEY``` environment variable to a valid production SDK Key. +``` +pip install -r requirements.txt +pip install -r test-requirements.txt +``` -1. ```$ py.test testing``` +The additional requirements files `consul-requirements.txt`, `dynamodb-requirements.txt`, `redis-requirements.txt`, and `test-filesource-optional-requirements.txt` can also be installed if you need to test the corresponding features. -1. All code must be compatible with all supported Python versions as described in README. Most portability issues are addressed by using the `six` package. We are avoiding the use of `__future__` imports, since they can easily be omitted by mistake causing code in one file to behave differently from another; instead, whenever possible, use an explicit approach that makes it clear what the desired behavior is in all Python versions (e.g. if you want to do floor division, use `//`; if you want to divide as floats, explicitly cast to floats). +### Testing -Developing with different Python versions ------------------------------------------ +To run all unit tests: -Example for switching to Python 3: +``` +pytest +``` -```virtualenv -p `which python3` ~/.virtualenvs/python-client``` \ No newline at end of file +There are also integration tests that can be run against the LaunchDarkly service. To enable them, set the environment variable `LD_SDK_KEY` to a valid production SDK Key. + +### Portability + +Most portability issues are addressed by using the `six` package. We are avoiding the use of `__future__` imports, since they can easily be omitted by mistake causing code in one file to behave differently from another; instead, whenever possible, use an explicit approach that makes it clear what the desired behavior is in all Python versions (e.g. if you want to do floor division, use `//`; if you want to divide as floats, explicitly cast to floats). + +It is preferable to run tests against all supported minor versions of Python (as described in `README.md` under Requirements), or at least the lowest and highest versions, prior to submitting a pull request. However, LaunchDarkly's CI tests will run automatically against all supported versions. 
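(Editorial aside, not part of this patch: the portability conventions added to CONTRIBUTING.md above are easier to see in a short sketch. The example below is illustrative only — the function and its names are hypothetical — and shows code written the way the guidelines describe: `six` for cross-version string type checks and explicit division operators rather than `__future__` imports.)

```python
import six

def format_ratio(label, numerator, denominator):
    # six.string_types covers str and unicode on Python 2 and str on Python 3,
    # so this check behaves identically on every supported interpreter.
    if not isinstance(label, six.string_types):
        label = str(label)
    fraction = float(numerator) / float(denominator)  # explicit float division
    whole_part = numerator // denominator             # explicit floor division
    return '%s: %d + %.3f' % (label, whole_part, fraction - whole_part)
```

Spelling out `//` and `float(...)` at each call site keeps the intended behavior obvious in both Python 2 and 3, which is exactly why the guide discourages relying on file-level `__future__` imports.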
diff --git a/README.md b/README.md index 6013179f..74e2fb23 100644 --- a/README.md +++ b/README.md @@ -1,138 +1,53 @@ -LaunchDarkly SDK for Python -=========================== +# LaunchDarkly Server-side SDK for Python -[![Circle CI](https://img.shields.io/circleci/project/launchdarkly/python-client.png)](https://circleci.com/gh/launchdarkly/python-client) +[![Circle CI](https://img.shields.io/circleci/project/launchdarkly/python-server-sdk.png)](https://circleci.com/gh/launchdarkly/python-server-sdk) -[![FOSSA Status](https://app.fossa.io/api/projects/git%2Bhttps%3A%2F%2Fgithub.com%2Flaunchdarkly%2Fpython-client.svg?type=shield)](https://app.fossa.io/projects/git%2Bhttps%3A%2F%2Fgithub.com%2Flaunchdarkly%2Fpython-client?ref=badge_shield) +[![FOSSA Status](https://app.fossa.io/api/projects/git%2Bhttps%3A%2F%2Fgithub.com%2Flaunchdarkly%2Fpython-server-sdk.svg?type=shield)](https://app.fossa.io/projects/git%2Bhttps%3A%2F%2Fgithub.com%2Flaunchdarkly%2Fpython-server-sdk?ref=badge_shield) [![PyPI](https://img.shields.io/pypi/v/ldclient-py.svg?maxAge=2592000)](https://pypi.python.org/pypi/ldclient-py) [![PyPI](https://img.shields.io/pypi/pyversions/ldclient-py.svg)](https://pypi.python.org/pypi/ldclient-py) -[![Twitter Follow](https://img.shields.io/twitter/follow/launchdarkly.svg?style=social&label=Follow&maxAge=2592000)](https://twitter.com/intent/follow?screen_name=launchdarkly) - -Supported Python versions -------------------------- - -This version of the LaunchDarkly SDK is compatible with Python 2.7, and Python 3.3 through 3.7. - -Quick setup ------------ - -1. Install the Python SDK with `pip` - - pip install ldclient-py - -2. Configure the library with your sdk key: - - import ldclient - -3. Get the client: - - ldclient.set_sdk_key("your sdk key") - client = ldclient.get() - - -HTTPS proxy ------------- -Python's standard HTTP library provides built-in support for the use of a HTTPS proxy. If the HTTPS_PROXY environment variable is present then the SDK will proxy all network requests through the URL provided. +## LaunchDarkly overview -How to set the HTTPS_PROXY environment variable on Mac/Linux systems: -``` -export HTTPS_PROXY=https://web-proxy.domain.com:8080 -``` - - -How to set the HTTPS_PROXY environment variable on Windows systems: -``` -set HTTPS_PROXY=https://web-proxy.domain.com:8080 -``` - -Or it can be set from within python: -``` -os.environ["https_proxy"] = "https://web-proxy.domain.com:8080" -``` - -If your proxy requires authentication then you can prefix the URN with your login information: -``` -export HTTPS_PROXY=http://user:pass@web-proxy.domain.com:8080 -``` -or -``` -set HTTPS_PROXY=http://user:pass@web-proxy.domain.com:8080 -``` - - -Your first feature flag ------------------------ - -1. Create a new feature flag on your [dashboard](https://app.launchdarkly.com) -2. In your application code, use the feature's key to check whether the flag is on for each user: - - if client.variation("your.flag.key", {"key": "user@test.com"}, False): - # application code to show the feature - else: - # the code to run if the feature is off - -Supported Python versions -------------------------- - -The SDK is tested with the most recent patch releases of Python 2.7, 3.3, 3.4, 3.5, and 3.6. Python 2.6 is no longer supported. - -Database integrations ---------------------- +[LaunchDarkly](https://www.launchdarkly.com) is a feature management platform that serves over 100 billion feature flags daily to help teams build better software, faster. 
[Get started](https://docs.launchdarkly.com/docs/getting-started) using LaunchDarkly today! + +[![Twitter Follow](https://img.shields.io/twitter/follow/launchdarkly.svg?style=social&label=Follow&maxAge=2592000)](https://twitter.com/intent/follow?screen_name=launchdarkly) -Feature flag data can be kept in a persistent store using Consul, DynamoDB, or Redis. These adapters are implemented in the `Consul`, `DynamoDB` and `Redis` classes in `ldclient.integrations`; to use them, call the `new_feature_store` method in the appropriate class, and put the returned object in the `feature_store` property of your client configuration. See [`ldclient.integrations`](https://launchdarkly-python-sdk.readthedocs.io/en/latest/api-integrations.html#module-ldclient.integrations) and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store) for more information. +## Supported Python versions -Note that Consul is not supported in Python 3.3 or 3.4. +This version of the LaunchDarkly SDK is compatible with Python 2.7 and 3.3 through 3.7. It is tested with the most recent patch releases of those versions. Python 2.6 is no longer supported. -Using flag data from a file ---------------------------- +## Getting started -For testing purposes, the SDK can be made to read feature flag state from a file or files instead of connecting to LaunchDarkly. See [`ldclient.integrations.Files`](https://launchdarkly-python-sdk.readthedocs.io/en/latest/api-integrations.html#ldclient.integrations.Files) and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/reading-flags-from-a-file) for more details. +Refer to the [SDK reference guide](https://docs.launchdarkly.com/docs/python-sdk-reference) for instructions on getting started with using the SDK. -Learn more ----------- +## Learn more Check out our [documentation](http://docs.launchdarkly.com) for in-depth instructions on configuring and using LaunchDarkly. You can also head straight to the [complete reference guide for this SDK](http://docs.launchdarkly.com/docs/python-sdk-reference). Generated API documentation is on [readthedocs.io](https://launchdarkly-python-sdk.readthedocs.io/en/latest/). -Testing -------- +## Testing We run integration tests for all our SDKs using a centralized test harness. This approach gives us the ability to test for consistency across SDKs, as well as test networking behavior in a long-running application. These tests cover each method in the SDK, and verify that event sending, flag evaluation, stream reconnection, and other aspects of the SDK all behave correctly. -[![Test Coverage](https://codeclimate.com/github/launchdarkly/python-client/badges/coverage.svg)](https://codeclimate.com/github/launchdarkly/python-client/coverage) The Code Climate coverage does not include the coverage provided by this integration test harness. +[![Test Coverage](https://codeclimate.com/github/launchdarkly/python-server-sdk/badges/coverage.svg)](https://codeclimate.com/github/launchdarkly/python-server-sdk/coverage) The Code Climate coverage does not include the coverage provided by this integration test harness. -Contributing ------------- +## Contributing -See [CONTRIBUTING](CONTRIBUTING.md) for more information. +We encourage pull requests and other contributions from the community. Check out our [contributing guidelines](CONTRIBUTING.md) for instructions on how to contribute to this SDK. 
-About LaunchDarkly ------------------- +## About LaunchDarkly * LaunchDarkly is a continuous delivery platform that provides feature flags as a service and allows developers to iterate quickly and safely. We allow you to easily flag your features and manage them from the LaunchDarkly dashboard. With LaunchDarkly, you can: * Roll out a new feature to a subset of your users (like a group of users who opt-in to a beta tester group), gathering feedback and bug reports from real-world use cases. * Gradually roll out a feature to an increasing percentage of users, and track the effect that the feature has on key metrics (for instance, how likely is a user to complete a purchase if they have feature A versus feature B?). * Turn off a feature that you realize is causing performance problems in production, without needing to re-deploy, or even restart the application with a changed configuration file. * Grant access to certain features based on user attributes, like payment plan (eg: users on the ‘gold’ plan get access to more features than users in the ‘silver’ plan). Disable parts of your application to facilitate maintenance, without taking everything offline. -* LaunchDarkly provides feature flag SDKs for - * [Java](http://docs.launchdarkly.com/docs/java-sdk-reference "LaunchDarkly Java SDK") - * [JavaScript](http://docs.launchdarkly.com/docs/js-sdk-reference "LaunchDarkly JavaScript SDK") - * [PHP](http://docs.launchdarkly.com/docs/php-sdk-reference "LaunchDarkly PHP SDK") - * [Python](http://docs.launchdarkly.com/docs/python-sdk-reference "LaunchDarkly Python SDK") - * [Go](http://docs.launchdarkly.com/docs/go-sdk-reference "LaunchDarkly Go SDK") - * [Node.JS](http://docs.launchdarkly.com/docs/node-sdk-reference "LaunchDarkly Node SDK") - * [Electron](http://docs.launchdarkly.com/docs/electron-sdk-reference "LaunchDarkly Electron SDK") - * [.NET](http://docs.launchdarkly.com/docs/dotnet-sdk-reference "LaunchDarkly .Net SDK") - * [Ruby](http://docs.launchdarkly.com/docs/ruby-sdk-reference "LaunchDarkly Ruby SDK") - * [iOS](http://docs.launchdarkly.com/docs/ios-sdk-reference "LaunchDarkly iOS SDK") - * [Android](http://docs.launchdarkly.com/docs/android-sdk-reference "LaunchDarkly Android SDK") - * [C/C++](http://docs.launchdarkly.com/docs/c-sdk-reference "LaunchDarkly C/C++ SDK") +* LaunchDarkly provides feature flag SDKs for a wide variety of languages and technologies. Check out [our documentation](https://docs.launchdarkly.com/docs) for a complete list. 
* Explore LaunchDarkly - * [launchdarkly.com](http://www.launchdarkly.com/ "LaunchDarkly Main Website") for more information - * [docs.launchdarkly.com](http://docs.launchdarkly.com/ "LaunchDarkly Documentation") for our documentation and SDKs - * [apidocs.launchdarkly.com](http://apidocs.launchdarkly.com/ "LaunchDarkly API Documentation") for our API documentation - * [blog.launchdarkly.com](http://blog.launchdarkly.com/ "LaunchDarkly Blog Documentation") for the latest product updates + * [launchdarkly.com](https://www.launchdarkly.com/ "LaunchDarkly Main Website") for more information + * [docs.launchdarkly.com](https://docs.launchdarkly.com/ "LaunchDarkly Documentation") for our documentation and SDK reference guides + * [apidocs.launchdarkly.com](https://apidocs.launchdarkly.com/ "LaunchDarkly API Documentation") for our API documentation + * [blog.launchdarkly.com](https://blog.launchdarkly.com/ "LaunchDarkly Blog Documentation") for the latest product updates * [Feature Flagging Guide](https://github.com/launchdarkly/featureflags/ "Feature Flagging Guide") for best practices and strategies diff --git a/scripts/release.sh b/scripts/release.sh index 089dae25..0f1808b7 100755 --- a/scripts/release.sh +++ b/scripts/release.sh @@ -9,7 +9,7 @@ # When done you should commit and push the changes made. set -uxe -echo "Starting python-client release." +echo "Starting python-server-sdk release." VERSION=$1 @@ -28,4 +28,4 @@ python setup.py sdist pip install twine python -m twine upload dist/* -echo "Done with python-client release" +echo "Done with python-server-sdk release" diff --git a/setup.py b/setup.py index 012def24..ee3faef9 100644 --- a/setup.py +++ b/setup.py @@ -49,7 +49,7 @@ def run(self): author='LaunchDarkly', author_email='team@launchdarkly.com', packages=find_packages(), - url='https://github.com/launchdarkly/python-client', + url='https://github.com/launchdarkly/python-server-sdk', description='LaunchDarkly SDK for Python', long_description='LaunchDarkly SDK for Python', install_requires=reqs, From f41f2ccc210a872df2445825e5e837eb9d3cf5f7 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 25 Apr 2019 20:28:37 -0700 Subject: [PATCH 109/190] allow unit tests to be run without databases --- CONTRIBUTING.md | 2 ++ testing/test_feature_store.py | 30 +++++++++++++++++------------- 2 files changed, 19 insertions(+), 13 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 91c39924..697a6753 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -38,6 +38,8 @@ To run all unit tests: pytest ``` +By default, the full unit test suite includes live tests of the integrations for Consul, DynamoDB, and Redis. Those tests expect you to have instances of all of those databases running locally. To skip them, set the environment variable `LD_SKIP_DATABASE_TESTS=1` before running the tests. + There are also integration tests that can be run against the LaunchDarkly service. To enable them, set the environment variable `LD_SDK_KEY` to a valid production SDK Key. 
### Portability diff --git a/testing/test_feature_store.py b/testing/test_feature_store.py index ce0150cf..04267c16 100644 --- a/testing/test_feature_store.py +++ b/testing/test_feature_store.py @@ -1,5 +1,6 @@ import boto3 import json +import os import pytest import redis import time @@ -165,19 +166,22 @@ def _clear_data(self): class TestFeatureStore: - params = [ - InMemoryTester(), - RedisTester(CacheConfig.default()), - RedisTester(CacheConfig.disabled()), - RedisWithDeprecatedConstructorTester(CacheConfig.default()), - RedisWithDeprecatedConstructorTester(CacheConfig.disabled()), - DynamoDBTester(CacheConfig.default()), - DynamoDBTester(CacheConfig.disabled()) - ] - - if have_consul: - params.append(ConsulTester(CacheConfig.default())) - params.append(ConsulTester(CacheConfig.disabled())) + if os.environ.get('LD_SKIP_DATABASE_TESTS') == '1': + params = [ + InMemoryTester() + ] + else: + params = [ + RedisTester(CacheConfig.default()), + RedisTester(CacheConfig.disabled()), + RedisWithDeprecatedConstructorTester(CacheConfig.default()), + RedisWithDeprecatedConstructorTester(CacheConfig.disabled()), + DynamoDBTester(CacheConfig.default()), + DynamoDBTester(CacheConfig.disabled()) + ] + if have_consul: + params.append(ConsulTester(CacheConfig.default())) + params.append(ConsulTester(CacheConfig.disabled())) @pytest.fixture(params=params) def tester(self, request): From d764fd8dee20f7b5ab9e3c10f55712b7baf3447d Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 25 Apr 2019 20:29:40 -0700 Subject: [PATCH 110/190] add missing test --- testing/test_feature_store.py | 1 + 1 file changed, 1 insertion(+) diff --git a/testing/test_feature_store.py b/testing/test_feature_store.py index 04267c16..d64a25f8 100644 --- a/testing/test_feature_store.py +++ b/testing/test_feature_store.py @@ -172,6 +172,7 @@ class TestFeatureStore: ] else: params = [ + InMemoryTester(), RedisTester(CacheConfig.default()), RedisTester(CacheConfig.disabled()), RedisWithDeprecatedConstructorTester(CacheConfig.default()), From ea5d8e8a24dddde4f4ca2636032ce338b53a69f8 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 26 Apr 2019 10:52:16 -0700 Subject: [PATCH 111/190] rm FOSSA link/badge --- README.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/README.md b/README.md index 74e2fb23..5766279f 100644 --- a/README.md +++ b/README.md @@ -2,8 +2,6 @@ [![Circle CI](https://img.shields.io/circleci/project/launchdarkly/python-server-sdk.png)](https://circleci.com/gh/launchdarkly/python-server-sdk) -[![FOSSA Status](https://app.fossa.io/api/projects/git%2Bhttps%3A%2F%2Fgithub.com%2Flaunchdarkly%2Fpython-server-sdk.svg?type=shield)](https://app.fossa.io/projects/git%2Bhttps%3A%2F%2Fgithub.com%2Flaunchdarkly%2Fpython-server-sdk?ref=badge_shield) - [![PyPI](https://img.shields.io/pypi/v/ldclient-py.svg?maxAge=2592000)](https://pypi.python.org/pypi/ldclient-py) [![PyPI](https://img.shields.io/pypi/pyversions/ldclient-py.svg)](https://pypi.python.org/pypi/ldclient-py) From 6ed12f1aa46240dbc1bfcd6d1307a1ac2f5f1f54 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 26 Apr 2019 12:09:24 -0700 Subject: [PATCH 112/190] misc fixes --- CONTRIBUTING.md | 4 ++-- README.md | 2 -- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 697a6753..2027062b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -4,7 +4,7 @@ LaunchDarkly has published an [SDK contributor's guide](https://docs.launchdarkl ## Submitting bug reports and feature requests -The LaunchDarkly SDK team monitors the issue 
tracker associated with the `launchdarkly/python-server-sdk` SDK repository. Bug reports and feature requests specific to this SDK should be filed in this issue tracker. The SDK team will respond to all newly filed issues within two business days.
+The LaunchDarkly SDK team monitors the [issue tracker](https://github.com/launchdarkly/python-server-sdk/issues) in the SDK repository. Bug reports and feature requests specific to this SDK should be filed in this issue tracker. The SDK team will respond to all newly filed issues within two business days.
 
 ## Submitting pull requests
 
@@ -14,7 +14,7 @@ We encourage pull requests and other contributions from the community. Before su
 
 ### Setup
 
-It's advisable to use `virtualenv` to create a development environment within the project directory:
+It's advisable to use [`virtualenv`](https://virtualenv.pypa.io/) to create a development environment within the project directory:
 
 ```
 mkvirtualenv python-client
diff --git a/README.md b/README.md
index 5766279f..be38186a 100644
--- a/README.md
+++ b/README.md
@@ -29,8 +29,6 @@ Generated API documentation is on [readthedocs.io](https://launchdarkly-python-s
 
 We run integration tests for all our SDKs using a centralized test harness. This approach gives us the ability to test for consistency across SDKs, as well as test networking behavior in a long-running application. These tests cover each method in the SDK, and verify that event sending, flag evaluation, stream reconnection, and other aspects of the SDK all behave correctly.
 
-[![Test Coverage](https://codeclimate.com/github/launchdarkly/python-server-sdk/badges/coverage.svg)](https://codeclimate.com/github/launchdarkly/python-server-sdk/coverage) The Code Climate coverage does not include the coverage provided by this integration test harness.
-
 ## Contributing
 
 We encourage pull requests and other contributions from the community. Check out our [contributing guidelines](CONTRIBUTING.md) for instructions on how to contribute to this SDK.
 
From cbac044ea647870b5949ab0c704011e4f3c7ef56 Mon Sep 17 00:00:00 2001
From: Eli Bishop
Date: Fri, 26 Apr 2019 13:18:46 -0700
Subject: [PATCH 113/190] minor doc link fix

---
 docs/index.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/index.rst b/docs/index.rst
index 7a9d2c73..909ac27d 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -8,7 +8,7 @@ LaunchDarkly Python SDK
 
 This is the API reference for the `LaunchDarkly `_ SDK for Python.
 
-The latest version of the SDK can be found on `PyPI <https://pypi.python.org/pypi/ldclient-py>`_, and the source code is on `GitHub <https://github.com/launchdarkly/python-client>`_.
+The latest version of the SDK can be found on `PyPI <https://pypi.python.org/pypi/ldclient-py>`_, and the source code is on `GitHub <https://github.com/launchdarkly/python-server-sdk>`_.
 
 For more information, see LaunchDarkly's `Quickstart `_ and `SDK Reference Guide `_.
From 52c3b23649d59324618a6a1015af26a933e020d5 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 1 May 2019 11:41:26 -0700 Subject: [PATCH 114/190] fix skipping of database tests --- testing/test_feature_store.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/testing/test_feature_store.py b/testing/test_feature_store.py index d64a25f8..90af57ca 100644 --- a/testing/test_feature_store.py +++ b/testing/test_feature_store.py @@ -19,6 +19,8 @@ from ldclient.redis_feature_store import RedisFeatureStore from ldclient.versioned_data_kind import FEATURES +skip_db_tests = os.environ.get('LD_SKIP_DATABASE_TESTS') == '1' + class InMemoryTester(object): def init_store(self): @@ -166,7 +168,7 @@ def _clear_data(self): class TestFeatureStore: - if os.environ.get('LD_SKIP_DATABASE_TESTS') == '1': + if skip_db_tests: params = [ InMemoryTester() ] @@ -321,6 +323,7 @@ def test_stores_with_different_prefixes_are_independent(self, tester): assert items == { 'flagB1': flag_b1, 'flagB2': flag_b2 } +@pytest.mark.skipif(skip_db_tests, reason="skipping database tests") class TestRedisFeatureStoreExtraTests: def test_upsert_race_condition_against_external_client_with_higher_version(self): other_client = redis.StrictRedis(host='localhost', port=6379, db=0) From 6161055385c842bbd234de20c6c6e45f76068057 Mon Sep 17 00:00:00 2001 From: Ben Woskow <48036130+bwoskow-ld@users.noreply.github.com> Date: Wed, 1 May 2019 13:21:36 -0700 Subject: [PATCH 115/190] renaming the package to launchdarkly-server-sdk (#108) --- CONTRIBUTING.md | 4 ++-- README.md | 4 ++-- docs/Makefile | 2 +- docs/conf.py | 12 ++++++------ docs/index.rst | 4 ++-- setup.py | 2 +- 6 files changed, 14 insertions(+), 14 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 2027062b..7d2a9b8a 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -17,8 +17,8 @@ We encourage pull requests and other contributions from the community. Before su It's advisable to use [`virtualenv`](https://virtualenv.pypa.io/) to create a development environment within the project directory: ``` -mkvirtualenv python-client -source ./python-client/bin/activate +mkvirtualenv python-server-sdk +source ~/.virtualenvs/python-server-sdk/bin/activate ``` To install the runtime and test requirements: diff --git a/README.md b/README.md index be38186a..7858bbc9 100644 --- a/README.md +++ b/README.md @@ -2,8 +2,8 @@ [![Circle CI](https://img.shields.io/circleci/project/launchdarkly/python-server-sdk.png)](https://circleci.com/gh/launchdarkly/python-server-sdk) -[![PyPI](https://img.shields.io/pypi/v/ldclient-py.svg?maxAge=2592000)](https://pypi.python.org/pypi/ldclient-py) -[![PyPI](https://img.shields.io/pypi/pyversions/ldclient-py.svg)](https://pypi.python.org/pypi/ldclient-py) +[![PyPI](https://img.shields.io/pypi/v/launchdarkly-server-sdk.svg?maxAge=2592000)](https://pypi.python.org/pypi/launchdarkly-server-sdk) +[![PyPI](https://img.shields.io/pypi/pyversions/launchdarkly-server-sdk.svg)](https://pypi.python.org/pypi/launchdarkly-server-sdk) ## LaunchDarkly overview diff --git a/docs/Makefile b/docs/Makefile index ebce0c0b..aea5aff6 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -5,7 +5,7 @@ SPHINXOPTS = SPHINXBUILD = sphinx-build -SPHINXPROJ = ldclient-py +SPHINXPROJ = launchdarkly-server-sdk SOURCEDIR = . 
BUILDDIR = build
 
diff --git a/docs/conf.py b/docs/conf.py
index 10f481f3..9e3db965 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -25,7 +25,7 @@
 
 # -- Project information -----------------------------------------------------
 
-project = u'ldclient-py'
+project = u'launchdarkly-server-sdk'
 copyright = u'2019, LaunchDarkly'
 author = u'LaunchDarkly'
 
@@ -110,7 +110,7 @@
 # -- Options for HTMLHelp output ---------------------------------------------
 
 # Output file base name for HTML help builder.
-htmlhelp_basename = 'ldclient-pydoc'
+htmlhelp_basename = 'launchdarkly-server-sdk-doc'
 
 # -- Options for LaTeX output ------------------------------------------------
 
@@ -137,7 +137,7 @@
 # (source start file, target name, title,
 #  author, documentclass [howto, manual, or own class]).
 latex_documents = [
-    (master_doc, 'ldclient-py.tex', u'ldclient-py Documentation',
+    (master_doc, 'launchdarkly-server-sdk.tex', u'launchdarkly-server-sdk Documentation',
      u'LaunchDarkly', 'manual'),
 ]
 
@@ -147,7 +147,7 @@
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
 man_pages = [
-    (master_doc, 'ldclient-py', u'ldclient-py Documentation',
+    (master_doc, 'launchdarkly-server-sdk', u'launchdarkly-server-sdk Documentation',
      [author], 1)
 ]
 
@@ -158,8 +158,8 @@
 # (source start file, target name, title, author,
 #  dir menu entry, description, category)
 texinfo_documents = [
-    (master_doc, 'ldclient-py', u'ldclient-py Documentation',
-     author, 'ldclient-py', 'One line description of project.',
+    (master_doc, 'launchdarkly-server-sdk', u'launchdarkly-server-sdk Documentation',
+     author, 'launchdarkly-server-sdk', 'One line description of project.',
      'Miscellaneous'),
 ]
 
diff --git a/docs/index.rst b/docs/index.rst
index 909ac27d..1be4daca 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -1,4 +1,4 @@
-.. ldclient-py documentation master file, created by
+.. launchdarkly-server-sdk documentation master file, created by
    sphinx-quickstart on Mon Feb 4 13:16:49 2019.
    You can adapt this file completely to your liking, but it should at least
    contain the root `toctree` directive.
@@ -8,7 +8,7 @@ LaunchDarkly Python SDK
 
 This is the API reference for the `LaunchDarkly `_ SDK for Python.
 
-The latest version of the SDK can be found on `PyPI <https://pypi.python.org/pypi/ldclient-py>`_, and the source code is on `GitHub <https://github.com/launchdarkly/python-server-sdk>`_.
+The latest version of the SDK can be found on `PyPI <https://pypi.python.org/pypi/launchdarkly-server-sdk>`_, and the source code is on `GitHub <https://github.com/launchdarkly/python-server-sdk>`_.
 
 For more information, see LaunchDarkly's `Quickstart `_ and `SDK Reference Guide `_.
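One practical note on this rename, as a hedged sketch: only the PyPI distribution name changes (the setup.py hunk below completes it); the importable package is still `ldclient`, so — assuming the module-level `set_config`/`get` helpers — application code like the following keeps working unchanged:

```python
# Installed as "launchdarkly-server-sdk" (formerly "ldclient-py"), but the
# import name is unchanged. The SDK key below is a placeholder.
import ldclient
from ldclient.config import Config

ldclient.set_config(Config(sdk_key='YOUR_SDK_KEY'))
client = ldclient.get()
print(client.is_initialized())
```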
diff --git a/setup.py b/setup.py index b69dece9..41ccf721 100644 --- a/setup.py +++ b/setup.py @@ -44,7 +44,7 @@ def run(self): raise SystemExit(errno) setup( - name='ldclient-py', + name='launchdarkly-server-sdk', version=ldclient_version, author='LaunchDarkly', author_email='team@launchdarkly.com', From 34b15f5b5ceb9243fc7a259322308c7f9466d02c Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 11 Jun 2019 12:05:10 -0700 Subject: [PATCH 116/190] use log.warning(), not log.warn() or warnings.warn() --- ldclient/client.py | 26 +++++++++---------- ldclient/config.py | 2 +- ldclient/flag.py | 2 +- .../integrations/files/file_data_source.py | 2 +- ldclient/operators.py | 8 +++--- ldclient/sse_client.py | 4 +-- 6 files changed, 22 insertions(+), 22 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index 16d91b0a..5a65201a 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -82,7 +82,7 @@ def __init__(self, sdk_key=None, config=None, start_wait=5): "Only one of either is expected") if sdk_key is not None: - log.warn("Deprecated sdk_key argument was passed to init. Use config object instead.") + log.warning("Deprecated sdk_key argument was passed to init. Use config object instead.") self._config = Config(sdk_key=sdk_key) else: self._config = config or Config.default() @@ -113,7 +113,7 @@ def __init__(self, sdk_key=None, config=None, start_wait=5): if self._update_processor.initialized() is True: log.info("Started LaunchDarkly Client: OK") else: - log.warn("Initialization timeout exceeded for LaunchDarkly Client or an error occurred. " + log.warning("Initialization timeout exceeded for LaunchDarkly Client or an error occurred. " "Feature Flags may not yet be available.") def _make_event_processor(self, config): @@ -139,7 +139,7 @@ def _make_update_processor(self, config, store, ready): return StreamingUpdateProcessor(config, feature_requester, store, ready) log.info("Disabling streaming API") - log.warn("You should only disable the streaming API if instructed to do so by LaunchDarkly support") + log.warning("You should only disable the streaming API if instructed to do so by LaunchDarkly support") return PollingUpdateProcessor(config, feature_requester, store, ready) def get_sdk_key(self): @@ -180,7 +180,7 @@ def track(self, event_name, user, data=None): :param data: optional additional data associated with the event """ if user is None or user.get('key') is None: - log.warn("Missing user or user key when calling track().") + log.warning("Missing user or user key when calling track().") else: self._send_event({'kind': 'custom', 'key': event_name, 'user': user, 'data': data}) @@ -194,7 +194,7 @@ def identify(self, user): :param dict user: attributes of the user to register """ if user is None or user.get('key') is None: - log.warn("Missing user or user key when calling identify().") + log.warning("Missing user or user key when calling identify().") else: self._send_event({'kind': 'identify', 'key': str(user.get('key')), 'user': user}) @@ -234,7 +234,7 @@ def toggle(self, key, user, default): .. deprecated:: 2.0.0 """ - log.warn("Deprecated method: toggle() called. Use variation() instead.") + log.warning("Deprecated method: toggle() called. 
Use variation() instead.") return self.variation(key, user, default) def variation(self, key, user, default): @@ -281,16 +281,16 @@ def send_event(value, variation=None, flag=None, reason=None): if not self.is_initialized(): if self._store.initialized: - log.warn("Feature Flag evaluation attempted before client has initialized - using last known values from feature store for feature key: " + key) + log.warning("Feature Flag evaluation attempted before client has initialized - using last known values from feature store for feature key: " + key) else: - log.warn("Feature Flag evaluation attempted before client has initialized! Feature store unavailable - returning default: " + log.warning("Feature Flag evaluation attempted before client has initialized! Feature store unavailable - returning default: " + str(default) + " for feature key: " + key) reason = error_reason('CLIENT_NOT_READY') send_event(default, None, None, reason) return EvaluationDetail(default, None, reason) if user is not None and user.get('key', "") == "": - log.warn("User key is blank. Flag evaluation will proceed, but the user will not be stored in LaunchDarkly.") + log.warning("User key is blank. Flag evaluation will proceed, but the user will not be stored in LaunchDarkly.") try: flag = self._store.get(FEATURES, key, lambda x: x) @@ -369,18 +369,18 @@ def all_flags_state(self, user, **kwargs): :rtype: FeatureFlagsState """ if self._config.offline: - log.warn("all_flags_state() called, but client is in offline mode. Returning empty state") + log.warning("all_flags_state() called, but client is in offline mode. Returning empty state") return FeatureFlagsState(False) if not self.is_initialized(): if self._store.initialized: - log.warn("all_flags_state() called before client has finished initializing! Using last known values from feature store") + log.warning("all_flags_state() called before client has finished initializing! Using last known values from feature store") else: - log.warn("all_flags_state() called before client has finished initializing! Feature store unavailable - returning empty state") + log.warning("all_flags_state() called before client has finished initializing! Feature store unavailable - returning empty state") return FeatureFlagsState(False) if user is None or user.get('key') is None: - log.warn("User or user key is None when calling all_flags_state(). Returning empty state.") + log.warning("User or user key is None when calling all_flags_state(). 
Returning empty state.") return FeatureFlagsState(False) state = FeatureFlagsState(True) diff --git a/ldclient/config.py b/ldclient/config.py index f8ef61d0..b0283d95 100644 --- a/ldclient/config.py +++ b/ldclient/config.py @@ -280,4 +280,4 @@ def inline_users_in_events(self): def _validate(self): if self.offline is False and self.sdk_key is None or self.sdk_key is '': - log.warn("Missing or blank sdk_key.") + log.warning("Missing or blank sdk_key.") diff --git a/ldclient/flag.py b/ldclient/flag.py index dceb699c..c7515e63 100644 --- a/ldclient/flag.py +++ b/ldclient/flag.py @@ -147,7 +147,7 @@ def _check_prerequisites(flag, user, store, events, include_reasons_in_events): for prereq in flag.get('prerequisites') or []: prereq_flag = store.get(FEATURES, prereq.get('key'), lambda x: x) if prereq_flag is None: - log.warn("Missing prereq flag: " + prereq.get('key')) + log.warning("Missing prereq flag: " + prereq.get('key')) failed_prereq = prereq else: prereq_res = _evaluate(prereq_flag, user, store, events, include_reasons_in_events) diff --git a/ldclient/impl/integrations/files/file_data_source.py b/ldclient/impl/integrations/files/file_data_source.py index 9ba6e561..785a3851 100644 --- a/ldclient/impl/integrations/files/file_data_source.py +++ b/ldclient/impl/integrations/files/file_data_source.py @@ -107,7 +107,7 @@ def _start_auto_updater(self): try: resolved_paths.append(os.path.realpath(path)) except: - log.warn('Cannot watch for changes to data file "%s" because it is an invalid path' % path) + log.warning('Cannot watch for changes to data file "%s" because it is an invalid path' % path) if have_watchdog and not self._force_polling: return _FileDataSource.WatchdogAutoUpdater(resolved_paths, self._load_all) else: diff --git a/ldclient/operators.py b/ldclient/operators.py index 253e8a8b..158455ca 100644 --- a/ldclient/operators.py +++ b/ldclient/operators.py @@ -27,7 +27,7 @@ def _string_operator(u, c, fn): def _numeric_operator(u, c, fn): # bool is a subtype of int, and we don't want to try and compare it as a number. if isinstance(input, bool): - log.warn("Got unexpected bool type when attempting to parse time") + log.warning("Got unexpected bool type when attempting to parse time") return None if isinstance(u, Number): @@ -44,7 +44,7 @@ def _parse_time(input): # bool is a subtype of int, and we don't want to try and compare it as a time. 
if isinstance(input, bool): - log.warn("Got unexpected bool type when attempting to parse time") + log.warning("Got unexpected bool type when attempting to parse time") return None if isinstance(input, Number): @@ -56,10 +56,10 @@ def _parse_time(input): timestamp = (parsed_time - epoch).total_seconds() return timestamp * 1000.0 except Exception as e: - log.warn("Couldn't parse timestamp:" + str(input) + " with message: " + str(e)) + log.warning("Couldn't parse timestamp:" + str(input) + " with message: " + str(e)) return None - log.warn("Got unexpected type: " + type(input) + " with value: " + str(input) + " when attempting to parse time") + log.warning("Got unexpected type: " + type(input) + " with value: " + str(input) + " when attempting to parse time") return None def _time_operator(u, c, fn): diff --git a/ldclient/sse_client.py b/ldclient/sse_client.py index 49d853c7..fcd255a3 100644 --- a/ldclient/sse_client.py +++ b/ldclient/sse_client.py @@ -7,13 +7,13 @@ import re import time -import warnings import six import urllib3 from ldclient.util import create_http_pool_manager +from ldclient.util import log from ldclient.util import throw_if_unsuccessful_response # Technically, we should support streams that mix line endings. This regex, @@ -158,7 +158,7 @@ def parse(cls, raw): m = cls.sse_line_pattern.match(line) if m is None: # Malformed line. Discard but warn. - warnings.warn('Invalid SSE line: "%s"' % line, SyntaxWarning) + log.warning('Invalid SSE line: "%s"' % line) continue name = m.groupdict()['name'] From c990266e46818e26ee026660e067a5a907eef447 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 19 Aug 2019 16:19:27 -0700 Subject: [PATCH 117/190] drop events when inbox is full --- ldclient/event_processor.py | 55 ++++++++++++++++++++++----------- testing/test_event_processor.py | 30 ++++++++++++++++++ 2 files changed, 67 insertions(+), 18 deletions(-) diff --git a/ldclient/event_processor.py b/ldclient/event_processor.py index 2bd4f322..cf52a2fb 100644 --- a/ldclient/event_processor.py +++ b/ldclient/event_processor.py @@ -187,7 +187,7 @@ def __init__(self, capacity): def add_event(self, event): if len(self._events) >= self._capacity: if not self._exceeded_capacity: - log.warning("Event queue is full-- dropped an event") + log.warning("Exceeded event queue capacity. Increase capacity to avoid dropping events.") self._exceeded_capacity = True else: self._events.append(event) @@ -205,13 +205,13 @@ def clear(self): class EventDispatcher(object): - def __init__(self, queue, config, http_client): - self._queue = queue + def __init__(self, inbox, config, http_client): + self._inbox = inbox self._config = config self._http = create_http_pool_manager(num_pools=1, verify_ssl=config.verify_ssl) if http_client is None else http_client self._close_http = (http_client is None) # so we know whether to close it later self._disabled = False - self._buffer = EventBuffer(config.events_max_pending) + self._outbox = EventBuffer(config.events_max_pending) self._user_keys = SimpleLRUCache(config.user_keys_capacity) self._formatter = EventOutputFormatter(config) self._last_known_past_time = 0 @@ -226,7 +226,7 @@ def _run_main_loop(self): log.info("Starting event processor") while True: try: - message = self._queue.get(block=True) + message = self._inbox.get(block=True) if message.type == 'event': self._process_event(message.param) elif message.type == 'flush': @@ -248,7 +248,7 @@ def _process_event(self, event): return # Always record the event in the summarizer. 
- self._buffer.add_to_summary(event) + self._outbox.add_to_summary(event) # Decide whether to add the event to the payload. Feature events may be added twice, once for # the event (if tracked) and once for debugging. @@ -271,13 +271,13 @@ def _process_event(self, event): if add_index_event: ie = { 'kind': 'index', 'creationDate': event['creationDate'], 'user': user } - self._buffer.add_event(ie) + self._outbox.add_event(ie) if add_full_event: - self._buffer.add_event(event) + self._outbox.add_event(event) if add_debug_event: debug_event = event.copy() debug_event['debug'] = True - self._buffer.add_event(debug_event) + self._outbox.add_event(debug_event) # Add to the set of users we've noticed, and return true if the user was already known to us. def notice_user(self, user): @@ -298,13 +298,13 @@ def _should_debug_event(self, event): def _trigger_flush(self): if self._disabled: return - payload = self._buffer.get_payload() + payload = self._outbox.get_payload() if len(payload.events) > 0 or len(payload.summary.counters) > 0: task = EventPayloadSendTask(self._http, self._config, self._formatter, payload, self._handle_response) if self._flush_workers.execute(task.run): # The events have been handed off to a flush worker; clear them from our buffer. - self._buffer.clear() + self._outbox.clear() else: # We're already at our limit of concurrent flushes; leave the events in the buffer. pass @@ -330,22 +330,23 @@ def _do_shutdown(self): class DefaultEventProcessor(EventProcessor): - def __init__(self, config, http=None): - self._queue = queue.Queue(config.events_max_pending) + def __init__(self, config, http=None, dispatcher_class=None): + self._inbox = queue.Queue(config.events_max_pending) + self._inbox_full = False self._flush_timer = RepeatingTimer(config.flush_interval, self.flush) self._users_flush_timer = RepeatingTimer(config.user_keys_flush_interval, self._flush_users) self._flush_timer.start() self._users_flush_timer.start() self._close_lock = Lock() self._closed = False - EventDispatcher(self._queue, config, http) + (dispatcher_class or EventDispatcher)(self._inbox, config, http) def send_event(self, event): event['creationDate'] = int(time.time() * 1000) - self._queue.put(EventProcessorMessage('event', event)) + self._post_to_inbox(EventProcessorMessage('event', event)) def flush(self): - self._queue.put(EventProcessorMessage('flush', None)) + self._post_to_inbox(EventProcessorMessage('flush', None)) def stop(self): with self._close_lock: @@ -355,10 +356,21 @@ def stop(self): self._flush_timer.stop() self._users_flush_timer.stop() self.flush() + # Note that here we are not calling _post_to_inbox, because we *do* want to wait if the inbox + # is full; an orderly shutdown can't happen unless these messages are received. 
self._post_message_and_wait('stop') + def _post_to_inbox(self, message): + try: + self._inbox.put(message, block=False) + except queue.Full: + if not self._inbox_full: + # possible race condition here, but it's of no real consequence - we'd just get an extra log line + self._inbox_full = True + log.warning("Events are being produced faster than they can be processed; some events will be dropped") + def _flush_users(self): - self._queue.put(EventProcessorMessage('flush_users', None)) + self._inbox.put(EventProcessorMessage('flush_users', None)) # Used only in tests def _wait_until_inactive(self): @@ -366,5 +378,12 @@ def _wait_until_inactive(self): def _post_message_and_wait(self, type): reply = Event() - self._queue.put(EventProcessorMessage(type, reply)) + self._inbox.put(EventProcessorMessage(type, reply)) reply.wait() + + # These magic methods allow use of the "with" block in tests + def __enter__(self): + return self + + def __exit__(self, tyep, value, traceback): + self.stop() diff --git a/testing/test_event_processor.py b/testing/test_event_processor.py index a2e110b2..8faa78d3 100644 --- a/testing/test_event_processor.py +++ b/testing/test_event_processor.py @@ -1,5 +1,6 @@ import json import pytest +from threading import Thread import time from ldclient.config import Config @@ -460,6 +461,35 @@ def test_will_still_send_after_429_error(): def test_will_still_send_after_500_error(): verify_recoverable_http_error(500) +def test_does_not_block_on_full_inbox(): + config = Config(events_max_pending=1) # this sets the size of both the inbox and the outbox to 1 + ep_inbox_holder = [ None ] + ep_inbox = None + + def dispatcher_factory(inbox, config, http): + ep_inbox_holder[0] = inbox # it's an array because otherwise it's hard for a closure to modify a variable + return None # the dispatcher object itself doesn't matter, we only manipulate the inbox + def event_consumer(): + while True: + message = ep_inbox.get(block=True) + if message.type == 'stop': + message.param.set() + return + def start_consuming_events(): + Thread(target=event_consumer).start() + + with DefaultEventProcessor(config, mock_http, dispatcher_factory) as ep: + ep_inbox = ep_inbox_holder[0] + event1 = { 'kind': 'custom', 'key': 'event1', 'user': user } + event2 = { 'kind': 'custom', 'key': 'event2', 'user': user } + ep.send_event(event1) + ep.send_event(event2) # this event should be dropped - inbox is full + message1 = ep_inbox.get(block=False) + had_no_more = ep_inbox.empty() + start_consuming_events() + assert message1.param == event1 + assert had_no_more + def verify_unrecoverable_http_error(status): setup_processor(Config(sdk_key = 'SDK_KEY')) From e436f77d4374c7ae052f7aea095db0a550a4c01c Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 19 Aug 2019 16:26:44 -0700 Subject: [PATCH 118/190] rm obsolete pytest.raises parameter --- testing/test_feature_store_helpers.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/testing/test_feature_store_helpers.py b/testing/test_feature_store_helpers.py index 77ccb6f8..0e2da11b 100644 --- a/testing/test_feature_store_helpers.py +++ b/testing/test_feature_store_helpers.py @@ -137,7 +137,7 @@ def test_get_can_throw_exception(self, cached): core = MockCore() wrapper = make_wrapper(core, cached) core.error = CustomError() - with pytest.raises(CustomError, message="expected exception"): + with pytest.raises(CustomError): wrapper.get(THINGS, "key", lambda x: x) @pytest.mark.parametrize("cached", [False, True]) @@ -204,7 +204,7 @@ def 
test_get_all_can_throw_exception(self, cached): core = MockCore() wrapper = make_wrapper(core, cached) core.error = CustomError() - with pytest.raises(CustomError, message="expected exception"): + with pytest.raises(CustomError): wrapper.all(THINGS) @pytest.mark.parametrize("cached", [False, True]) @@ -255,7 +255,7 @@ def test_upsert_can_throw_exception(self, cached): core = MockCore() wrapper = make_wrapper(core, cached) core.error = CustomError() - with pytest.raises(CustomError, message="expected exception"): + with pytest.raises(CustomError): wrapper.upsert(THINGS, { "key": "x", "version": 1 }) @pytest.mark.parametrize("cached", [False, True]) @@ -281,7 +281,7 @@ def test_delete_can_throw_exception(self, cached): core = MockCore() wrapper = make_wrapper(core, cached) core.error = CustomError() - with pytest.raises(CustomError, message="expected exception"): + with pytest.raises(CustomError): wrapper.delete(THINGS, "x", 1) def test_uncached_initialized_queries_state_only_until_inited(self): From 1e068c9dd649df80fed8efbca38fd65b56803623 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 19 Aug 2019 17:14:40 -0700 Subject: [PATCH 119/190] clean up test state management --- testing/test_event_processor.py | 653 +++++++++++++++----------------- 1 file changed, 314 insertions(+), 339 deletions(-) diff --git a/testing/test_event_processor.py b/testing/test_event_processor.py index 8faa78d3..08568b87 100644 --- a/testing/test_event_processor.py +++ b/testing/test_event_processor.py @@ -67,384 +67,361 @@ def setup_processor(config): def test_identify_event_is_queued(): - setup_processor(Config()) - - e = { 'kind': 'identify', 'user': user } - ep.send_event(e) - - output = flush_and_get_events() - assert len(output) == 1 - assert output == [{ - 'kind': 'identify', - 'creationDate': e['creationDate'], - 'key': user['key'], - 'user': user - }] + with DefaultEventProcessor(Config(), mock_http) as ep: + e = { 'kind': 'identify', 'user': user } + ep.send_event(e) + + output = flush_and_get_events(ep) + assert len(output) == 1 + assert output == [{ + 'kind': 'identify', + 'creationDate': e['creationDate'], + 'key': user['key'], + 'user': user + }] def test_user_is_filtered_in_identify_event(): - setup_processor(Config(all_attributes_private = True)) - - e = { 'kind': 'identify', 'user': user } - ep.send_event(e) - - output = flush_and_get_events() - assert len(output) == 1 - assert output == [{ - 'kind': 'identify', - 'creationDate': e['creationDate'], - 'key': user['key'], - 'user': filtered_user - }] + with DefaultEventProcessor(Config(all_attributes_private = True), mock_http) as ep: + e = { 'kind': 'identify', 'user': user } + ep.send_event(e) + + output = flush_and_get_events(ep) + assert len(output) == 1 + assert output == [{ + 'kind': 'identify', + 'creationDate': e['creationDate'], + 'key': user['key'], + 'user': filtered_user + }] def test_user_attrs_are_stringified_in_identify_event(): - setup_processor(Config()) - - e = { 'kind': 'identify', 'user': numeric_user } - ep.send_event(e) - - output = flush_and_get_events() - assert len(output) == 1 - assert output == [{ - 'kind': 'identify', - 'creationDate': e['creationDate'], - 'key': stringified_numeric_user['key'], - 'user': stringified_numeric_user - }] + with DefaultEventProcessor(Config(), mock_http) as ep: + e = { 'kind': 'identify', 'user': numeric_user } + ep.send_event(e) + + output = flush_and_get_events(ep) + assert len(output) == 1 + assert output == [{ + 'kind': 'identify', + 'creationDate': e['creationDate'], + 'key': 
stringified_numeric_user['key'], + 'user': stringified_numeric_user + }] def test_individual_feature_event_is_queued_with_index_event(): - setup_processor(Config()) - - e = { - 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, - 'variation': 1, 'value': 'value', 'default': 'default', 'trackEvents': True - } - ep.send_event(e) + with DefaultEventProcessor(Config(), mock_http) as ep: + e = { + 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, + 'variation': 1, 'value': 'value', 'default': 'default', 'trackEvents': True + } + ep.send_event(e) - output = flush_and_get_events() - assert len(output) == 3 - check_index_event(output[0], e, user) - check_feature_event(output[1], e, False, None) - check_summary_event(output[2]) + output = flush_and_get_events(ep) + assert len(output) == 3 + check_index_event(output[0], e, user) + check_feature_event(output[1], e, False, None) + check_summary_event(output[2]) def test_user_is_filtered_in_index_event(): - setup_processor(Config(all_attributes_private = True)) - - e = { - 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, - 'variation': 1, 'value': 'value', 'default': 'default', 'trackEvents': True - } - ep.send_event(e) + with DefaultEventProcessor(Config(all_attributes_private = True), mock_http) as ep: + e = { + 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, + 'variation': 1, 'value': 'value', 'default': 'default', 'trackEvents': True + } + ep.send_event(e) - output = flush_and_get_events() - assert len(output) == 3 - check_index_event(output[0], e, filtered_user) - check_feature_event(output[1], e, False, None) - check_summary_event(output[2]) + output = flush_and_get_events(ep) + assert len(output) == 3 + check_index_event(output[0], e, filtered_user) + check_feature_event(output[1], e, False, None) + check_summary_event(output[2]) def test_user_attrs_are_stringified_in_index_event(): - setup_processor(Config()) - - e = { - 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': numeric_user, - 'variation': 1, 'value': 'value', 'default': 'default', 'trackEvents': True - } - ep.send_event(e) + with DefaultEventProcessor(Config(), mock_http) as ep: + e = { + 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': numeric_user, + 'variation': 1, 'value': 'value', 'default': 'default', 'trackEvents': True + } + ep.send_event(e) - output = flush_and_get_events() - assert len(output) == 3 - check_index_event(output[0], e, stringified_numeric_user) - check_feature_event(output[1], e, False, None) - check_summary_event(output[2]) + output = flush_and_get_events(ep) + assert len(output) == 3 + check_index_event(output[0], e, stringified_numeric_user) + check_feature_event(output[1], e, False, None) + check_summary_event(output[2]) def test_feature_event_can_contain_inline_user(): - setup_processor(Config(inline_users_in_events = True)) - - e = { - 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, - 'variation': 1, 'value': 'value', 'default': 'default', 'trackEvents': True - } - ep.send_event(e) + with DefaultEventProcessor(Config(inline_users_in_events = True), mock_http) as ep: + e = { + 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, + 'variation': 1, 'value': 'value', 'default': 'default', 'trackEvents': True + } + ep.send_event(e) - output = flush_and_get_events() - assert len(output) == 2 - check_feature_event(output[0], e, False, user) - check_summary_event(output[1]) + output = flush_and_get_events(ep) + assert len(output) == 2 + 
check_feature_event(output[0], e, False, user) + check_summary_event(output[1]) def test_user_is_filtered_in_feature_event(): - setup_processor(Config(inline_users_in_events = True, all_attributes_private = True)) - - e = { - 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, - 'variation': 1, 'value': 'value', 'default': 'default', 'trackEvents': True - } - ep.send_event(e) + with DefaultEventProcessor(Config(inline_users_in_events = True, all_attributes_private = True), mock_http) as ep: + e = { + 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, + 'variation': 1, 'value': 'value', 'default': 'default', 'trackEvents': True + } + ep.send_event(e) - output = flush_and_get_events() - assert len(output) == 2 - check_feature_event(output[0], e, False, filtered_user) - check_summary_event(output[1]) + output = flush_and_get_events(ep) + assert len(output) == 2 + check_feature_event(output[0], e, False, filtered_user) + check_summary_event(output[1]) def test_user_attrs_are_stringified_in_feature_event(): - setup_processor(Config(inline_users_in_events = True)) - - e = { - 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': numeric_user, - 'variation': 1, 'value': 'value', 'default': 'default', 'trackEvents': True - } - ep.send_event(e) + with DefaultEventProcessor(Config(inline_users_in_events = True), mock_http) as ep: + e = { + 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': numeric_user, + 'variation': 1, 'value': 'value', 'default': 'default', 'trackEvents': True + } + ep.send_event(e) - output = flush_and_get_events() - assert len(output) == 2 - check_feature_event(output[0], e, False, stringified_numeric_user) - check_summary_event(output[1]) + output = flush_and_get_events(ep) + assert len(output) == 2 + check_feature_event(output[0], e, False, stringified_numeric_user) + check_summary_event(output[1]) def test_index_event_is_still_generated_if_inline_users_is_true_but_feature_event_is_not_tracked(): - setup_processor(Config(inline_users_in_events = True)) - - e = { - 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, - 'variation': 1, 'value': 'value', 'default': 'default', 'trackEvents': False - } - ep.send_event(e) + with DefaultEventProcessor(Config(inline_users_in_events = True), mock_http) as ep: + e = { + 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, + 'variation': 1, 'value': 'value', 'default': 'default', 'trackEvents': False + } + ep.send_event(e) - output = flush_and_get_events() - assert len(output) == 2 - check_index_event(output[0], e, user) - check_summary_event(output[1]) + output = flush_and_get_events(ep) + assert len(output) == 2 + check_index_event(output[0], e, user) + check_summary_event(output[1]) def test_two_events_for_same_user_only_produce_one_index_event(): - setup_processor(Config(user_keys_flush_interval = 300)) - - e0 = { - 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, - 'variation': 1, 'value': 'value', 'default': 'default', 'trackEvents': True - } - e1 = e0.copy(); - ep.send_event(e0) - ep.send_event(e1) + with DefaultEventProcessor(Config(user_keys_flush_interval = 300), mock_http) as ep: + e0 = { + 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, + 'variation': 1, 'value': 'value', 'default': 'default', 'trackEvents': True + } + e1 = e0.copy(); + ep.send_event(e0) + ep.send_event(e1) - output = flush_and_get_events() - assert len(output) == 4 - check_index_event(output[0], e0, user) - check_feature_event(output[1], e0, False, None) 
- check_feature_event(output[2], e1, False, None) - check_summary_event(output[3]) + output = flush_and_get_events(ep) + assert len(output) == 4 + check_index_event(output[0], e0, user) + check_feature_event(output[1], e0, False, None) + check_feature_event(output[2], e1, False, None) + check_summary_event(output[3]) def test_new_index_event_is_added_if_user_cache_has_been_cleared(): - setup_processor(Config(user_keys_flush_interval = 0.1)) - - e0 = { - 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, - 'variation': 1, 'value': 'value', 'default': 'default', 'trackEvents': True - } - e1 = e0.copy(); - ep.send_event(e0); - time.sleep(0.2) - ep.send_event(e1) - - output = flush_and_get_events() - assert len(output) == 5 - check_index_event(output[0], e0, user) - check_feature_event(output[1], e0, False, None) - check_index_event(output[2], e1, user) - check_feature_event(output[3], e1, False, None) - check_summary_event(output[4]) + with DefaultEventProcessor(Config(user_keys_flush_interval = 0.1), mock_http) as ep: + e0 = { + 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, + 'variation': 1, 'value': 'value', 'default': 'default', 'trackEvents': True + } + e1 = e0.copy(); + ep.send_event(e0); + time.sleep(0.2) + ep.send_event(e1) + + output = flush_and_get_events(ep) + assert len(output) == 5 + check_index_event(output[0], e0, user) + check_feature_event(output[1], e0, False, None) + check_index_event(output[2], e1, user) + check_feature_event(output[3], e1, False, None) + check_summary_event(output[4]) def test_event_kind_is_debug_if_flag_is_temporarily_in_debug_mode(): - setup_processor(Config()) - - future_time = now() + 100000 - e = { - 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, - 'variation': 1, 'value': 'value', 'default': 'default', - 'trackEvents': False, 'debugEventsUntilDate': future_time - } - ep.send_event(e) + with DefaultEventProcessor(Config(), mock_http) as ep: + future_time = now() + 100000 + e = { + 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, + 'variation': 1, 'value': 'value', 'default': 'default', + 'trackEvents': False, 'debugEventsUntilDate': future_time + } + ep.send_event(e) - output = flush_and_get_events() - assert len(output) == 3 - check_index_event(output[0], e, user) - check_feature_event(output[1], e, True, user) - check_summary_event(output[2]) + output = flush_and_get_events(ep) + assert len(output) == 3 + check_index_event(output[0], e, user) + check_feature_event(output[1], e, True, user) + check_summary_event(output[2]) def test_event_can_be_both_tracked_and_debugged(): - setup_processor(Config()) - - future_time = now() + 100000 - e = { - 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, - 'variation': 1, 'value': 'value', 'default': 'default', - 'trackEvents': True, 'debugEventsUntilDate': future_time - } - ep.send_event(e) + with DefaultEventProcessor(Config(), mock_http) as ep: + future_time = now() + 100000 + e = { + 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, + 'variation': 1, 'value': 'value', 'default': 'default', + 'trackEvents': True, 'debugEventsUntilDate': future_time + } + ep.send_event(e) - output = flush_and_get_events() - assert len(output) == 4 - check_index_event(output[0], e, user) - check_feature_event(output[1], e, False, None) - check_feature_event(output[2], e, True, user) - check_summary_event(output[3]) + output = flush_and_get_events(ep) + assert len(output) == 4 + check_index_event(output[0], e, user) + 
check_feature_event(output[1], e, False, None) + check_feature_event(output[2], e, True, user) + check_summary_event(output[3]) def test_debug_mode_expires_based_on_client_time_if_client_time_is_later_than_server_time(): - setup_processor(Config()) - - # Pick a server time that is somewhat behind the client time - server_time = now() - 20000 - - # Send and flush an event we don't care about, just to set the last server time - mock_http.set_server_time(server_time) - ep.send_event({ 'kind': 'identify', 'user': { 'key': 'otherUser' }}) - flush_and_get_events() - - # Now send an event with debug mode on, with a "debug until" time that is further in - # the future than the server time, but in the past compared to the client. - debug_until = server_time + 1000 - e = { - 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, - 'variation': 1, 'value': 'value', 'default': 'default', - 'trackEvents': False, 'debugEventsUntilDate': debug_until - } - ep.send_event(e) + with DefaultEventProcessor(Config(), mock_http) as ep: + # Pick a server time that is somewhat behind the client time + server_time = now() - 20000 + + # Send and flush an event we don't care about, just to set the last server time + mock_http.set_server_time(server_time) + ep.send_event({ 'kind': 'identify', 'user': { 'key': 'otherUser' }}) + flush_and_get_events(ep) + + # Now send an event with debug mode on, with a "debug until" time that is further in + # the future than the server time, but in the past compared to the client. + debug_until = server_time + 1000 + e = { + 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, + 'variation': 1, 'value': 'value', 'default': 'default', + 'trackEvents': False, 'debugEventsUntilDate': debug_until + } + ep.send_event(e) - # Should get a summary event only, not a full feature event - output = flush_and_get_events() - assert len(output) == 2 - check_index_event(output[0], e, user) - check_summary_event(output[1]) + # Should get a summary event only, not a full feature event + output = flush_and_get_events(ep) + assert len(output) == 2 + check_index_event(output[0], e, user) + check_summary_event(output[1]) def test_debug_mode_expires_based_on_server_time_if_server_time_is_later_than_client_time(): - setup_processor(Config()) - - # Pick a server time that is somewhat ahead of the client time - server_time = now() + 20000 - - # Send and flush an event we don't care about, just to set the last server time - mock_http.set_server_time(server_time) - ep.send_event({ 'kind': 'identify', 'user': { 'key': 'otherUser' }}) - flush_and_get_events() - - # Now send an event with debug mode on, with a "debug until" time that is further in - # the future than the client time, but in the past compared to the server. 
- debug_until = server_time - 1000 - e = { - 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, - 'variation': 1, 'value': 'value', 'default': 'default', - 'trackEvents': False, 'debugEventsUntilDate': debug_until - } - ep.send_event(e) + with DefaultEventProcessor(Config(), mock_http) as ep: + # Pick a server time that is somewhat ahead of the client time + server_time = now() + 20000 + + # Send and flush an event we don't care about, just to set the last server time + mock_http.set_server_time(server_time) + ep.send_event({ 'kind': 'identify', 'user': { 'key': 'otherUser' }}) + flush_and_get_events(ep) + + # Now send an event with debug mode on, with a "debug until" time that is further in + # the future than the client time, but in the past compared to the server. + debug_until = server_time - 1000 + e = { + 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, + 'variation': 1, 'value': 'value', 'default': 'default', + 'trackEvents': False, 'debugEventsUntilDate': debug_until + } + ep.send_event(e) - # Should get a summary event only, not a full feature event - output = flush_and_get_events() - assert len(output) == 2 - check_index_event(output[0], e, user) - check_summary_event(output[1]) + # Should get a summary event only, not a full feature event + output = flush_and_get_events(ep) + assert len(output) == 2 + check_index_event(output[0], e, user) + check_summary_event(output[1]) def test_two_feature_events_for_same_user_generate_only_one_index_event(): - setup_processor(Config()) - - e1 = { - 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, - 'variation': 1, 'value': 'value1', 'default': 'default', 'trackEvents': False - } - e2 = { - 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, - 'variation': 2, 'value': 'value2', 'default': 'default', 'trackEvents': False - } - ep.send_event(e1) - ep.send_event(e2) + with DefaultEventProcessor(Config(), mock_http) as ep: + e1 = { + 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, + 'variation': 1, 'value': 'value1', 'default': 'default', 'trackEvents': False + } + e2 = { + 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, + 'variation': 2, 'value': 'value2', 'default': 'default', 'trackEvents': False + } + ep.send_event(e1) + ep.send_event(e2) - output = flush_and_get_events() - assert len(output) == 2 - check_index_event(output[0], e1, user) - check_summary_event(output[1]) + output = flush_and_get_events(ep) + assert len(output) == 2 + check_index_event(output[0], e1, user) + check_summary_event(output[1]) def test_nontracked_events_are_summarized(): - setup_processor(Config()) - - e1 = { - 'kind': 'feature', 'key': 'flagkey1', 'version': 11, 'user': user, - 'variation': 1, 'value': 'value1', 'default': 'default1', 'trackEvents': False - } - e2 = { - 'kind': 'feature', 'key': 'flagkey2', 'version': 22, 'user': user, - 'variation': 2, 'value': 'value2', 'default': 'default2', 'trackEvents': False - } - ep.send_event(e1) - ep.send_event(e2) - - output = flush_and_get_events() - assert len(output) == 2 - check_index_event(output[0], e1, user) - se = output[1] - assert se['kind'] == 'summary' - assert se['startDate'] == e1['creationDate'] - assert se['endDate'] == e2['creationDate'] - assert se['features'] == { - 'flagkey1': { - 'default': 'default1', - 'counters': [ { 'version': 11, 'variation': 1, 'value': 'value1', 'count': 1 } ] - }, - 'flagkey2': { - 'default': 'default2', - 'counters': [ { 'version': 22, 'variation': 2, 'value': 'value2', 'count': 
1 } ] + with DefaultEventProcessor(Config(), mock_http) as ep: + e1 = { + 'kind': 'feature', 'key': 'flagkey1', 'version': 11, 'user': user, + 'variation': 1, 'value': 'value1', 'default': 'default1', 'trackEvents': False + } + e2 = { + 'kind': 'feature', 'key': 'flagkey2', 'version': 22, 'user': user, + 'variation': 2, 'value': 'value2', 'default': 'default2', 'trackEvents': False + } + ep.send_event(e1) + ep.send_event(e2) + + output = flush_and_get_events(ep) + assert len(output) == 2 + check_index_event(output[0], e1, user) + se = output[1] + assert se['kind'] == 'summary' + assert se['startDate'] == e1['creationDate'] + assert se['endDate'] == e2['creationDate'] + assert se['features'] == { + 'flagkey1': { + 'default': 'default1', + 'counters': [ { 'version': 11, 'variation': 1, 'value': 'value1', 'count': 1 } ] + }, + 'flagkey2': { + 'default': 'default2', + 'counters': [ { 'version': 22, 'variation': 2, 'value': 'value2', 'count': 1 } ] + } } - } def test_custom_event_is_queued_with_user(): - setup_processor(Config()) - - e = { 'kind': 'custom', 'key': 'eventkey', 'user': user, 'data': { 'thing': 'stuff '} } - ep.send_event(e) + with DefaultEventProcessor(Config(), mock_http) as ep: + e = { 'kind': 'custom', 'key': 'eventkey', 'user': user, 'data': { 'thing': 'stuff '} } + ep.send_event(e) - output = flush_and_get_events() - assert len(output) == 2 - check_index_event(output[0], e, user) - check_custom_event(output[1], e, None) + output = flush_and_get_events(ep) + assert len(output) == 2 + check_index_event(output[0], e, user) + check_custom_event(output[1], e, None) def test_custom_event_can_contain_inline_user(): - setup_processor(Config(inline_users_in_events = True)) + with DefaultEventProcessor(Config(inline_users_in_events = True), mock_http) as ep: + e = { 'kind': 'custom', 'key': 'eventkey', 'user': user, 'data': { 'thing': 'stuff '} } + ep.send_event(e) - e = { 'kind': 'custom', 'key': 'eventkey', 'user': user, 'data': { 'thing': 'stuff '} } - ep.send_event(e) - - output = flush_and_get_events() - assert len(output) == 1 - check_custom_event(output[0], e, user) + output = flush_and_get_events(ep) + assert len(output) == 1 + check_custom_event(output[0], e, user) def test_user_is_filtered_in_custom_event(): - setup_processor(Config(inline_users_in_events = True, all_attributes_private = True)) - - e = { 'kind': 'custom', 'key': 'eventkey', 'user': user, 'data': { 'thing': 'stuff '} } - ep.send_event(e) + with DefaultEventProcessor(Config(inline_users_in_events = True, all_attributes_private = True), mock_http) as ep: + e = { 'kind': 'custom', 'key': 'eventkey', 'user': user, 'data': { 'thing': 'stuff '} } + ep.send_event(e) - output = flush_and_get_events() - assert len(output) == 1 - check_custom_event(output[0], e, filtered_user) + output = flush_and_get_events(ep) + assert len(output) == 1 + check_custom_event(output[0], e, filtered_user) def test_user_attrs_are_stringified_in_custom_event(): - setup_processor(Config(inline_users_in_events = True)) + with DefaultEventProcessor(Config(inline_users_in_events = True), mock_http) as ep: + e = { 'kind': 'custom', 'key': 'eventkey', 'user': numeric_user, 'data': { 'thing': 'stuff '} } + ep.send_event(e) - e = { 'kind': 'custom', 'key': 'eventkey', 'user': numeric_user, 'data': { 'thing': 'stuff '} } - ep.send_event(e) - - output = flush_and_get_events() - assert len(output) == 1 - check_custom_event(output[0], e, stringified_numeric_user) + output = flush_and_get_events(ep) + assert len(output) == 1 + 
check_custom_event(output[0], e, stringified_numeric_user) def test_nothing_is_sent_if_there_are_no_events(): - setup_processor(Config()) - ep.flush() - ep._wait_until_inactive() - assert mock_http.request_data is None + with DefaultEventProcessor(Config(), mock_http) as ep: + ep.flush() + ep._wait_until_inactive() + assert mock_http.request_data is None def test_sdk_key_is_sent(): - setup_processor(Config(sdk_key = 'SDK_KEY')) - - ep.send_event({ 'kind': 'identify', 'user': user }) - ep.flush() - ep._wait_until_inactive() + with DefaultEventProcessor(Config(sdk_key = 'SDK_KEY'), mock_http) as ep: + ep.send_event({ 'kind': 'identify', 'user': user }) + ep.flush() + ep._wait_until_inactive() - assert mock_http.request_headers.get('Authorization') is 'SDK_KEY' + assert mock_http.request_headers.get('Authorization') is 'SDK_KEY' def test_no_more_payloads_are_sent_after_401_error(): verify_unrecoverable_http_error(401) @@ -491,34 +468,32 @@ def start_consuming_events(): assert had_no_more def verify_unrecoverable_http_error(status): - setup_processor(Config(sdk_key = 'SDK_KEY')) - - mock_http.set_response_status(status) - ep.send_event({ 'kind': 'identify', 'user': user }) - ep.flush() - ep._wait_until_inactive() - mock_http.reset() - - ep.send_event({ 'kind': 'identify', 'user': user }) - ep.flush() - ep._wait_until_inactive() - assert mock_http.request_data is None + with DefaultEventProcessor(Config(sdk_key = 'SDK_KEY'), mock_http) as ep: + mock_http.set_response_status(status) + ep.send_event({ 'kind': 'identify', 'user': user }) + ep.flush() + ep._wait_until_inactive() + mock_http.reset() + + ep.send_event({ 'kind': 'identify', 'user': user }) + ep.flush() + ep._wait_until_inactive() + assert mock_http.request_data is None def verify_recoverable_http_error(status): - setup_processor(Config(sdk_key = 'SDK_KEY')) - - mock_http.set_response_status(status) - ep.send_event({ 'kind': 'identify', 'user': user }) - ep.flush() - ep._wait_until_inactive() - mock_http.reset() - - ep.send_event({ 'kind': 'identify', 'user': user }) - ep.flush() - ep._wait_until_inactive() - assert mock_http.request_data is not None - -def flush_and_get_events(): + with DefaultEventProcessor(Config(sdk_key = 'SDK_KEY'), mock_http) as ep: + mock_http.set_response_status(status) + ep.send_event({ 'kind': 'identify', 'user': user }) + ep.flush() + ep._wait_until_inactive() + mock_http.reset() + + ep.send_event({ 'kind': 'identify', 'user': user }) + ep.flush() + ep._wait_until_inactive() + assert mock_http.request_data is not None + +def flush_and_get_events(ep): ep.flush() ep._wait_until_inactive() if mock_http.request_data is None: From 4b74fcff401d8dcac94822920a18d1de1fcafc1c Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 19 Aug 2019 17:18:46 -0700 Subject: [PATCH 120/190] typo --- ldclient/event_processor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ldclient/event_processor.py b/ldclient/event_processor.py index cf52a2fb..74baf37d 100644 --- a/ldclient/event_processor.py +++ b/ldclient/event_processor.py @@ -385,5 +385,5 @@ def _post_message_and_wait(self, type): def __enter__(self): return self - def __exit__(self, tyep, value, traceback): + def __exit__(self, type, value, traceback): self.stop() From ee7a51c14d42006887b2809366495f42fa3f402a Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 19 Aug 2019 20:30:57 -0700 Subject: [PATCH 121/190] store the package version in just one place --- .ldrelease/update-version.sh | 9 --------- scripts/release.sh | 7 +------ setup.py | 6 ++---- 3 
files changed, 3 insertions(+), 19 deletions(-) delete mode 100755 .ldrelease/update-version.sh diff --git a/.ldrelease/update-version.sh b/.ldrelease/update-version.sh deleted file mode 100755 index a8edafa1..00000000 --- a/.ldrelease/update-version.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/bash - -# Update version in ldclient/version.py -echo "VERSION = \"${LD_RELEASE_VERSION}\"" > ldclient/version.py - -# Update version in setup.py -SETUP_PY_TEMP=./setup.py.tmp -sed "s/ldclient_version=.*/ldclient_version='${LD_RELEASE_VERSION}'/g" setup.py > ${SETUP_PY_TEMP} -mv ${SETUP_PY_TEMP} setup.py diff --git a/scripts/release.sh b/scripts/release.sh index 0f1808b7..d2b24e73 100755 --- a/scripts/release.sh +++ b/scripts/release.sh @@ -13,14 +13,9 @@ echo "Starting python-server-sdk release." VERSION=$1 -#Update version in ldclient/version.py +# Update version in ldclient/version.py - setup.py references this constant echo "VERSION = \"${VERSION}\"" > ldclient/version.py -# Update version in setup.py -SETUP_PY_TEMP=./setup.py.tmp -sed "s/ldclient_version=.*/ldclient_version='${VERSION}'/g" setup.py > ${SETUP_PY_TEMP} -mv ${SETUP_PY_TEMP} setup.py - # Prepare distribution python setup.py sdist diff --git a/setup.py b/setup.py index 2aec3cf0..9b110b4f 100644 --- a/setup.py +++ b/setup.py @@ -1,4 +1,5 @@ from setuptools import find_packages, setup, Command +import ldclient import sys import uuid @@ -9,9 +10,6 @@ def parse_requirements(filename): lineiter = (line.strip() for line in open(filename)) return [line for line in lineiter if line and not line.startswith("#")] - -ldclient_version='6.9.4' - # parse_requirements() returns generator of pip.req.InstallRequirement objects install_reqs = parse_requirements('requirements.txt') test_reqs = parse_requirements('test-requirements.txt') @@ -45,7 +43,7 @@ def run(self): setup( name='launchdarkly-server-sdk', - version=ldclient_version, + version=ldclient.VERSION, author='LaunchDarkly', author_email='team@launchdarkly.com', packages=find_packages(), From 1c10e1e293c63dadd67f1d0ca2610f8b18b7a9a7 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 19 Aug 2019 20:37:50 -0700 Subject: [PATCH 122/190] fix package reference --- setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 9b110b4f..1d305a9e 100644 --- a/setup.py +++ b/setup.py @@ -1,5 +1,5 @@ from setuptools import find_packages, setup, Command -import ldclient +from ldclient.version import VERSION import sys import uuid @@ -43,7 +43,7 @@ def run(self): setup( name='launchdarkly-server-sdk', - version=ldclient.VERSION, + version=VERSION, author='LaunchDarkly', author_email='team@launchdarkly.com', packages=find_packages(), From d9c96dd03a1369b270b5cb276713a20ce3fa47eb Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 19 Aug 2019 20:42:16 -0700 Subject: [PATCH 123/190] add requirements --- .circleci/config.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 46e2166e..9d26ec77 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -16,6 +16,7 @@ test-template: &test-template name: install requirements command: | sudo pip install --upgrade pip virtualenv; + sudo pip install -r requirements.txt; sudo pip install -r test-requirements.txt; if [[ "$CIRCLE_JOB" != "test-3.3" ]]; then sudo pip install -r test-filesource-optional-requirements.txt; From 73d20f733cad0c6717e4e34ecfc82db38a754448 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 19 Aug 2019 21:11:14 -0700 Subject: [PATCH 124/190] 
don't import ldclient.version directly --- setup.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/setup.py b/setup.py index 1d305a9e..a0f4452f 100644 --- a/setup.py +++ b/setup.py @@ -1,10 +1,16 @@ from setuptools import find_packages, setup, Command -from ldclient.version import VERSION import sys import uuid - +# Get VERSION constant from ldclient.version - we can't simply import that module because +# ldclient/__init__.py imports all kinds of stuff that requires dependencies we may not have +# loaded yet. Based on https://packaging.python.org/guides/single-sourcing-package-version/ +version_module_globals = {} +with open('./ldclient/version.py') as f: + exec(f.read(), version_module_globals) +ldclient_version = version_module_globals['VERSION'] + def parse_requirements(filename): """ load requirements from a pip requirements file """ lineiter = (line.strip() for line in open(filename)) @@ -43,7 +49,7 @@ def run(self): setup( name='launchdarkly-server-sdk', - version=VERSION, + version=ldclient_version, author='LaunchDarkly', author_email='team@launchdarkly.com', packages=find_packages(), From 0a0aa8fae746f51183350cd999abb46d9b492285 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 19 Aug 2019 21:11:19 -0700 Subject: [PATCH 125/190] Revert "add requirements" This reverts commit d9c96dd03a1369b270b5cb276713a20ce3fa47eb. --- .circleci/config.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 9d26ec77..46e2166e 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -16,7 +16,6 @@ test-template: &test-template name: install requirements command: | sudo pip install --upgrade pip virtualenv; - sudo pip install -r requirements.txt; sudo pip install -r test-requirements.txt; if [[ "$CIRCLE_JOB" != "test-3.3" ]]; then sudo pip install -r test-filesource-optional-requirements.txt; From 11f0da63242cf195a989b29bebb9f0f6438d2323 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 19 Aug 2019 21:30:48 -0700 Subject: [PATCH 126/190] fix merge error + adjust for some event properties now being optional --- ldclient/client.py | 2 +- ldclient/event_processor.py | 2 +- ldclient/event_summarizer.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index ac087c4d..825d542c 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -397,7 +397,7 @@ def all_flags_state(self, user, **kwargs): if client_only and not flag.get('clientSide', False): continue try: - detail = evaluate(flag, user, self._store, False).detail + detail = evaluate(flag, user, self._store, self._event_factory_default).detail state.add_flag(flag, detail.value, detail.variation_index, detail.reason if with_reasons else None, details_only_if_tracked) except Exception as e: diff --git a/ldclient/event_processor.py b/ldclient/event_processor.py index 5a532861..f66e0e57 100644 --- a/ldclient/event_processor.py +++ b/ldclient/event_processor.py @@ -259,7 +259,7 @@ def _process_event(self, event): add_debug_event = False add_index_event = False if event['kind'] == "feature": - add_full_event = event['trackEvents'] + add_full_event = event.get('trackEvents') add_debug_event = self._should_debug_event(event) else: add_full_event = True diff --git a/ldclient/event_summarizer.py b/ldclient/event_summarizer.py index c0aa5aeb..64956fdc 100644 --- a/ldclient/event_summarizer.py +++ b/ldclient/event_summarizer.py @@ -20,7 +20,7 @@ def __init__(self): """ def summarize_event(self, event): if 
event['kind'] == 'feature': - counter_key = (event['key'], event['variation'], event['version']) + counter_key = (event['key'], event.get('variation'), event['version']) counter_val = self.counters.get(counter_key) if counter_val is None: counter_val = { 'count': 1, 'value': event['value'], 'default': event.get('default') } From 17bfa5ab690ace0d1ca924351a657c3b8dc4c36d Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 19 Aug 2019 23:05:26 -0700 Subject: [PATCH 127/190] fix summary logic again for now-optional event properties --- ldclient/event_summarizer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ldclient/event_summarizer.py b/ldclient/event_summarizer.py index 64956fdc..e046a347 100644 --- a/ldclient/event_summarizer.py +++ b/ldclient/event_summarizer.py @@ -20,7 +20,7 @@ def __init__(self): """ def summarize_event(self, event): if event['kind'] == 'feature': - counter_key = (event['key'], event.get('variation'), event['version']) + counter_key = (event['key'], event.get('variation'), event.get('version')) counter_val = self.counters.get(counter_key) if counter_val is None: counter_val = { 'count': 1, 'value': event['value'], 'default': event.get('default') } From a5da01068ecb320960f5b30f5493c1a299266082 Mon Sep 17 00:00:00 2001 From: Gabor Angeli Date: Fri, 25 Oct 2019 16:54:04 -0700 Subject: [PATCH 128/190] Allow explicitly proxying only ld requests (#130) --- ldclient/config.py | 10 +++++++++- ldclient/event_processor.py | 2 +- ldclient/feature_requester.py | 2 +- ldclient/sse_client.py | 5 +++-- ldclient/streaming.py | 3 ++- ldclient/util.py | 37 ++++++++++++++++++++++++++++------- 6 files changed, 46 insertions(+), 13 deletions(-) diff --git a/ldclient/config.py b/ldclient/config.py index b0283d95..0e4ab391 100644 --- a/ldclient/config.py +++ b/ldclient/config.py @@ -43,7 +43,8 @@ def __init__(self, offline=False, user_keys_capacity=1000, user_keys_flush_interval=300, - inline_users_in_events=False): + inline_users_in_events=False, + http_proxy=None): """ :param string sdk_key: The SDK key for your LaunchDarkly account. :param string base_uri: The base URL for the LaunchDarkly server. Most users should use the default @@ -95,6 +96,8 @@ def __init__(self, :type event_processor_class: (ldclient.config.Config) -> EventProcessor :param update_processor_class: A factory for an UpdateProcessor implementation taking the sdk key, config, and FeatureStore implementation + :param http_proxy: Use a proxy when connecting to LaunchDarkly. This is the full URI of the + proxy; for example: http://my-proxy.com:1234. 
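        As an illustrative sketch (the key and proxy URL here are hypothetical values),
        an application would opt in like so::

            config = Config(sdk_key='my-sdk-key', http_proxy='http://my-proxy.com:1234')

        The event processor, the polling feature requester, and the streaming connection
        all honor this setting.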
""" self.__sdk_key = sdk_key @@ -126,6 +129,7 @@ def __init__(self, self.__user_keys_capacity = user_keys_capacity self.__user_keys_flush_interval = user_keys_flush_interval self.__inline_users_in_events = inline_users_in_events + self.__http_proxy = http_proxy @classmethod def default(cls): @@ -278,6 +282,10 @@ def user_keys_flush_interval(self): def inline_users_in_events(self): return self.__inline_users_in_events + @property + def http_proxy(self): + return self.__http_proxy + def _validate(self): if self.offline is False and self.sdk_key is None or self.sdk_key is '': log.warning("Missing or blank sdk_key.") diff --git a/ldclient/event_processor.py b/ldclient/event_processor.py index f66e0e57..5d0e429d 100644 --- a/ldclient/event_processor.py +++ b/ldclient/event_processor.py @@ -211,7 +211,7 @@ class EventDispatcher(object): def __init__(self, inbox, config, http_client): self._inbox = inbox self._config = config - self._http = create_http_pool_manager(num_pools=1, verify_ssl=config.verify_ssl) if http_client is None else http_client + self._http = create_http_pool_manager(num_pools=1, verify_ssl=config.verify_ssl, proxy_url=config.http_proxy) if http_client is None else http_client self._close_http = (http_client is None) # so we know whether to close it later self._disabled = False self._outbox = EventBuffer(config.events_max_pending) diff --git a/ldclient/feature_requester.py b/ldclient/feature_requester.py index 51aee6a0..75dc373d 100644 --- a/ldclient/feature_requester.py +++ b/ldclient/feature_requester.py @@ -25,7 +25,7 @@ class FeatureRequesterImpl(FeatureRequester): def __init__(self, config): self._cache = dict() - self._http = create_http_pool_manager(num_pools=1, verify_ssl=config.verify_ssl) + self._http = create_http_pool_manager(num_pools=1, verify_ssl=config.verify_ssl, proxy_url=config.http_proxy) self._config = config def get_all_data(self): diff --git a/ldclient/sse_client.py b/ldclient/sse_client.py index fcd255a3..2603f31f 100644 --- a/ldclient/sse_client.py +++ b/ldclient/sse_client.py @@ -23,7 +23,7 @@ class SSEClient(object): def __init__(self, url, last_id=None, retry=3000, connect_timeout=10, read_timeout=300, chunk_size=10000, - verify_ssl=False, http=None, **kwargs): + verify_ssl=False, http=None, http_proxy=None, **kwargs): self.url = url self.last_id = last_id self.retry = retry @@ -32,7 +32,8 @@ def __init__(self, url, last_id=None, retry=3000, connect_timeout=10, read_timeo self._chunk_size = chunk_size # Optional support for passing in an HTTP client - self.http = create_http_pool_manager(num_pools=1, verify_ssl=verify_ssl) + self.http = create_http_pool_manager(num_pools=1, verify_ssl=verify_ssl, + proxy_url=http_proxy) # Any extra kwargs will be fed into the request call later. 
self.requests_kwargs = kwargs diff --git a/ldclient/streaming.py b/ldclient/streaming.py index 43e815a4..b279da9a 100644 --- a/ldclient/streaming.py +++ b/ldclient/streaming.py @@ -89,7 +89,8 @@ def _connect(self): headers=_stream_headers(self._config.sdk_key), connect_timeout=self._config.connect_timeout, read_timeout=stream_read_timeout, - verify_ssl=self._config.verify_ssl) + verify_ssl=self._config.verify_ssl, + http_proxy=self._config.http_proxy) def stop(self): log.info("Stopping StreamingUpdateProcessor") diff --git a/ldclient/util.py b/ldclient/util.py index 229030b8..1b5de3d2 100644 --- a/ldclient/util.py +++ b/ldclient/util.py @@ -84,14 +84,37 @@ def status(self): return self._status -def create_http_pool_manager(num_pools=1, verify_ssl=False): +def create_http_pool_manager(num_pools=1, verify_ssl=False, proxy_url=None): + """ + Create an http pool + + :param num_pools: The number of connection pools to use. + :param verify_ssl: If true, force the connections to verify valid SSL. + :param proxy_url: If set, proxy connections through the proxy at this URL. + + :return: A connection pool that implements urllib3.PoolManager + """ if not verify_ssl: - return urllib3.PoolManager(num_pools=num_pools) - return urllib3.PoolManager( - num_pools=num_pools, - cert_reqs='CERT_REQUIRED', - ca_certs=certifi.where() - ) + # Case: create a manager that does not need to respect SSL + if proxy_url is not None: + return urllib3.ProxyManager(num_pools=num_pools, proxy_url=proxy_url) + else: + return urllib3.PoolManager(num_pools=num_pools) + else: + # Case: force the connection to respect SSL + if proxy_url is not None: + return urllib3.ProxyManager( + num_pools=num_pools, + cert_reqs='CERT_REQUIRED', + ca_certs=certifi.where(), + proxy_url=proxy_url + ) + else: + return urllib3.PoolManager( + num_pools=num_pools, + cert_reqs='CERT_REQUIRED', + ca_certs=certifi.where() ) def throw_if_unsuccessful_response(resp): From 69f2233c0a7db19d4e6c4cb2946710a74c39c25f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 28 Oct 2019 13:22:36 -0700 Subject: [PATCH 129/190] fix broken indirect/patch request, add tests for feature requestor --- ldclient/feature_requester.py | 4 +- testing/http_util.py | 94 ++++++++++++++++++++++ testing/test_feature_requester.py | 127 ++++++++++++++++++++++++++++++ 3 files changed, 223 insertions(+), 2 deletions(-) create mode 100644 testing/http_util.py create mode 100644 testing/test_feature_requester.py diff --git a/ldclient/feature_requester.py b/ldclient/feature_requester.py index 51aee6a0..16a79981 100644 --- a/ldclient/feature_requester.py +++ b/ldclient/feature_requester.py @@ -36,7 +36,7 @@ def get_all_data(self): } def get_one(self, kind, key): - return self._do_request(kind.request_api_path + '/' + key, False) + return self._do_request(self._config.base_uri + kind.request_api_path + '/' + key, False) def _do_request(self, uri, allow_cache): hdrs = _headers(self._config.sdk_key) @@ -49,7 +49,7 @@ def _do_request(self, uri, allow_cache): timeout=urllib3.Timeout(connect=self._config.connect_timeout, read=self._config.read_timeout), retries=1) throw_if_unsuccessful_response(r) - if r.status == 304 and cache_entry is not None: + if r.status == 304 and allow_cache and cache_entry is not None: data = cache_entry.data etag = cache_entry.etag from_cache = True diff --git a/testing/http_util.py b/testing/http_util.py new file mode 100644 index 00000000..321f71cc --- /dev/null +++ b/testing/http_util.py @@ -0,0 +1,94 @@ +from http.server import BaseHTTPRequestHandler, HTTPServer 
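# A usage sketch for this helper (hypothetical path and data; every name used here is
# defined below in this module):
#
#     with start_server() as server:
#         server.setup_json_response('/some/path', { 'key': 'value' })
#         # ... point the code under test at server.uri and exercise it ...
#         req = server.require_request()  # oldest recorded request, non-blocking
#         assert req.path == '/some/path'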
+import json +from queue import Queue +from six import iteritems +import socket +from threading import Thread + +def get_available_port(): + s = socket.socket(socket.AF_INET, type = socket.SOCK_STREAM) + s.bind(('localhost', 0)) + _, port = s.getsockname() + s.close() + return port + +def start_server(): + sw = MockServerWrapper(get_available_port()) + sw.start() + return sw + +class MockServerWrapper(Thread): + def __init__(self, port): + Thread.__init__(self) + self.port = port + self.uri = 'http://localhost:%d' % port + self.server = HTTPServer(('localhost', port), MockServerRequestHandler) + self.server.server_wrapper = self + self.matchers = {} + self.requests = Queue() + + def close(self): + self.server.shutdown() + self.server.server_close() + + def run(self): + self.server.serve_forever() + + def setup_response(self, uri_path, status, body = None, headers = None): + self.matchers[uri_path] = MockServerResponse(status, body, headers) + + def setup_json_response(self, uri_path, data, headers = None): + final_headers = {} if headers is None else headers.copy() + final_headers['Content-Type'] = 'application/json' + return self.setup_response(uri_path, 200, json.dumps(data), final_headers) + + def await_request(self): + return self.requests.get() + + def require_request(self): + return self.requests.get(block=False) + + # enter/exit magic methods allow server to be auto-closed by "with" statement + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + self.close() + +class MockServerRequestHandler(BaseHTTPRequestHandler): + # def __init__(self, server_wrapper, request, client_address, server): + # self.server_wrapper = server_wrapper + # BaseHTTPRequestHandler.__init__(self, request, client_address, server) + + def do_GET(self): + self._do_request('GET') + + def do_POST(self): + self._do_request('POST') + + def _do_request(self, method): + server_wrapper = self.server.server_wrapper + server_wrapper.requests.put(MockServerRequest(method, self.path, self.headers)) + if self.path in server_wrapper.matchers: + resp = server_wrapper.matchers[self.path] + self.send_response(resp.status) + if resp.headers is not None: + for key, value in iteritems(resp.headers): + self.send_header(key, value) + self.end_headers() + if resp.body is not None: + self.wfile.write(resp.body) + else: + self.send_error(404) + +class MockServerRequest(object): + def __init__(self, method, path, headers): + self.method = method + self.path = path + self.headers = headers + +class MockServerResponse(object): + def __init__(self, status, body, headers): + self.status = status + self.body = body + self.headers = headers diff --git a/testing/test_feature_requester.py b/testing/test_feature_requester.py new file mode 100644 index 00000000..569f1ef9 --- /dev/null +++ b/testing/test_feature_requester.py @@ -0,0 +1,127 @@ +import pytest + +from ldclient.config import Config +from ldclient.feature_requester import FeatureRequesterImpl +from ldclient.util import UnsuccessfulResponseException +from ldclient.version import VERSION +from ldclient.versioned_data_kind import FEATURES, SEGMENTS +from testing.http_util import start_server + + +def test_get_all_data_returns_data(): + with start_server() as server: + config = Config(sdk_key = 'sdk-key', base_uri = server.uri) + fr = FeatureRequesterImpl(config) + + flags = { 'flag1': { 'key': 'flag1' } } + segments = { 'segment1': { 'key': 'segment1' } } + resp_data = { 'flags': flags, 'segments': segments } + expected_data = { FEATURES: flags, SEGMENTS: 
segments } + server.setup_json_response('/sdk/latest-all', resp_data) + + result = fr.get_all_data() + assert result == expected_data + +def test_get_all_data_sends_headers(): + with start_server() as server: + config = Config(sdk_key = 'sdk-key', base_uri = server.uri) + fr = FeatureRequesterImpl(config) + + resp_data = { 'flags': {}, 'segments': {} } + server.setup_json_response('/sdk/latest-all', resp_data) + + fr.get_all_data() + req = server.require_request() + assert req.headers['Authorization'] == 'sdk-key' + assert req.headers['User-Agent'] == 'PythonClient/' + VERSION + +def test_get_all_data_can_use_cached_data(): + with start_server() as server: + config = Config(sdk_key = 'sdk-key', base_uri = server.uri) + fr = FeatureRequesterImpl(config) + + etag1 = 'my-etag-1' + etag2 = 'my-etag-2' + resp_data1 = { 'flags': {}, 'segments': {} } + resp_data2 = { 'flags': { 'flag1': { 'key': 'flag1' } }, 'segments': {} } + expected_data1 = { FEATURES: {}, SEGMENTS: {} } + expected_data2 = { FEATURES: { 'flag1': { 'key': 'flag1' } }, SEGMENTS: {} } + req_path = '/sdk/latest-all' + server.setup_json_response(req_path, resp_data1, { 'Etag': etag1 }) + + result = fr.get_all_data() + assert result == expected_data1 + req = server.require_request() + assert 'If-None-Match' not in req.headers.keys() + + server.setup_response(req_path, 304, None, { 'Etag': etag1 }) + + result = fr.get_all_data() + assert result == expected_data1 + req = server.require_request() + assert req.headers['If-None-Match'] == etag1 + + server.setup_json_response(req_path, resp_data2, { 'Etag': etag2 }) + + result = fr.get_all_data() + assert result == expected_data2 + req = server.require_request() + assert req.headers['If-None-Match'] == etag1 + + server.setup_response(req_path, 304, None, { 'Etag': etag2 }) + + result = fr.get_all_data() + assert result == expected_data2 + req = server.require_request() + assert req.headers['If-None-Match'] == etag2 + +def test_get_one_flag_returns_data(): + with start_server() as server: + config = Config(sdk_key = 'sdk-key', base_uri = server.uri) + fr = FeatureRequesterImpl(config) + key = 'flag1' + flag_data = { 'key': key } + server.setup_json_response('/sdk/latest-flags/' + key, flag_data) + result = fr.get_one(FEATURES, key) + assert result == flag_data + +def test_get_one_flag_sends_headers(): + with start_server() as server: + config = Config(sdk_key = 'sdk-key', base_uri = server.uri) + fr = FeatureRequesterImpl(config) + key = 'flag1' + flag_data = { 'key': key } + server.setup_json_response('/sdk/latest-flags/' + key, flag_data) + fr.get_one(FEATURES, key) + req = server.require_request() + assert req.headers['Authorization'] == 'sdk-key' + assert req.headers['User-Agent'] == 'PythonClient/' + VERSION + +def test_get_one_flag_throws_on_error(): + with start_server() as server: + config = Config(sdk_key = 'sdk-key', base_uri = server.uri) + fr = FeatureRequesterImpl(config) + with pytest.raises(UnsuccessfulResponseException) as e: + fr.get_one(FEATURES, 'didnt-set-up-a-response-for-this-flag') + assert e.value.status == 404 + +def test_get_one_flag_does_not_use_etags(): + with start_server() as server: + config = Config(sdk_key = 'sdk-key', base_uri = server.uri) + fr = FeatureRequesterImpl(config) + + etag = 'my-etag' + key = 'flag1' + flag_data = { 'key': key } + req_path = '/sdk/latest-flags/' + key + server.setup_json_response(req_path, flag_data, { 'Etag': etag }) + + result = fr.get_one(FEATURES, key) + assert result == flag_data + req = server.require_request() + assert 
'If-None-Match' not in req.headers.keys() + + result = fr.get_one(FEATURES, key) + assert result == flag_data + req = server.require_request() + assert 'If-None-Match' not in req.headers.keys() # did not send etag from previous request From 0fa5e05d5cafaf810a2127ef4e4e0d94ba680781 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 28 Oct 2019 13:37:13 -0700 Subject: [PATCH 130/190] Python 2/3 compatibility for HTTPServer --- testing/http_util.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/testing/http_util.py b/testing/http_util.py index 321f71cc..a8c1fde7 100644 --- a/testing/http_util.py +++ b/testing/http_util.py @@ -1,7 +1,7 @@ -from http.server import BaseHTTPRequestHandler, HTTPServer import json from queue import Queue from six import iteritems +from six.moves import BaseHTTPServer import socket from threading import Thread @@ -22,7 +22,7 @@ def __init__(self, port): Thread.__init__(self) self.port = port self.uri = 'http://localhost:%d' % port - self.server = HTTPServer(('localhost', port), MockServerRequestHandler) + self.server = BaseHTTPServer.HTTPServer(('localhost', port), MockServerRequestHandler) self.server.server_wrapper = self self.matchers = {} self.requests = Queue() @@ -55,7 +55,7 @@ def __enter__(self): def __exit__(self, type, value, traceback): self.close() -class MockServerRequestHandler(BaseHTTPRequestHandler): +class MockServerRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler): # def __init__(self, server_wrapper, request, client_address, server): # self.server_wrapper = server_wrapper # BaseHTTPRequestHandler.__init__(self, request, client_address, server) From e75ff0fc70bebddf40b975340935b9c39246fb5f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 28 Oct 2019 13:48:39 -0700 Subject: [PATCH 131/190] Py2/3 compatibility: queue --- testing/http_util.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/testing/http_util.py b/testing/http_util.py index a8c1fde7..3a881f57 100644 --- a/testing/http_util.py +++ b/testing/http_util.py @@ -1,7 +1,6 @@ import json -from queue import Queue from six import iteritems -from six.moves import BaseHTTPServer +from six.moves import BaseHTTPServer, queue import socket from threading import Thread @@ -25,7 +24,7 @@ def __init__(self, port): self.server = BaseHTTPServer.HTTPServer(('localhost', port), MockServerRequestHandler) self.server.server_wrapper = self self.matchers = {} - self.requests = Queue() + self.requests = queue.Queue() def close(self): self.server.shutdown() From 68161a294d4266b4f7c74b2a9a243758c843d38b Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 28 Oct 2019 13:58:55 -0700 Subject: [PATCH 132/190] more Py3 compatibility --- testing/http_util.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/testing/http_util.py b/testing/http_util.py index 3a881f57..4e54890c 100644 --- a/testing/http_util.py +++ b/testing/http_util.py @@ -1,3 +1,4 @@ +from builtins import bytes import json from six import iteritems from six.moves import BaseHTTPServer, queue @@ -76,7 +77,7 @@ def _do_request(self, method): self.send_header(key, value) self.end_headers() if resp.body is not None: - self.wfile.write(resp.body) + self.wfile.write(bytes(resp.body, 'utf-8')) else: self.send_error(404) From 74c9eed339ee640f86048cc9b979bc0714d1d001 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 28 Oct 2019 15:12:54 -0700 Subject: [PATCH 133/190] don't need import of builtins --- testing/http_util.py | 1 - 1 file changed, 1 deletion(-) diff --git 
a/testing/http_util.py b/testing/http_util.py index 4e54890c..992c5d01 100644 --- a/testing/http_util.py +++ b/testing/http_util.py @@ -1,4 +1,3 @@ -from builtins import bytes import json from six import iteritems from six.moves import BaseHTTPServer, queue From 032b04ca8f512920a43d2d7176e41eaf343e4a0f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 28 Oct 2019 15:44:30 -0700 Subject: [PATCH 134/190] fix string encoding --- testing/http_util.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/testing/http_util.py b/testing/http_util.py index 992c5d01..009458bd 100644 --- a/testing/http_util.py +++ b/testing/http_util.py @@ -76,7 +76,7 @@ def _do_request(self, method): self.send_header(key, value) self.end_headers() if resp.body is not None: - self.wfile.write(bytes(resp.body, 'utf-8')) + self.wfile.write(resp.body.encode('UTF-8')) else: self.send_error(404) From 89ce3e2652ae9a4b99a37ccf6a3d882bf068aeb6 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 28 Oct 2019 17:19:10 -0700 Subject: [PATCH 135/190] implement setting proxy URL by environment variable --- ldclient/event_processor.py | 3 +- ldclient/feature_requester.py | 2 +- ldclient/sse_client.py | 2 +- ldclient/streaming.py | 10 ++++ ldclient/util.py | 39 ++++++++++++--- testing/http_util.py | 14 +++--- testing/test_event_processor.py | 38 +++++++++++++++ testing/test_feature_requester.py | 38 +++++++++++++++ testing/test_streaming.py | 81 +++++++++++++++++++++++++++++++ 9 files changed, 211 insertions(+), 16 deletions(-) create mode 100644 testing/test_streaming.py diff --git a/ldclient/event_processor.py b/ldclient/event_processor.py index 5ffd8517..1f9c5649 100644 --- a/ldclient/event_processor.py +++ b/ldclient/event_processor.py @@ -211,7 +211,8 @@ class EventDispatcher(object): def __init__(self, inbox, config, http_client): self._inbox = inbox self._config = config - self._http = create_http_pool_manager(num_pools=1, verify_ssl=config.verify_ssl) if http_client is None else http_client + self._http = create_http_pool_manager(num_pools=1, verify_ssl=config.verify_ssl, + target_base_uri=config.events_uri) if http_client is None else http_client self._close_http = (http_client is None) # so we know whether to close it later self._disabled = False self._outbox = EventBuffer(config.events_max_pending) diff --git a/ldclient/feature_requester.py b/ldclient/feature_requester.py index 16a79981..e14ebfe5 100644 --- a/ldclient/feature_requester.py +++ b/ldclient/feature_requester.py @@ -25,7 +25,7 @@ class FeatureRequesterImpl(FeatureRequester): def __init__(self, config): self._cache = dict() - self._http = create_http_pool_manager(num_pools=1, verify_ssl=config.verify_ssl) + self._http = create_http_pool_manager(num_pools=1, verify_ssl=config.verify_ssl, target_base_uri=config.base_uri) self._config = config def get_all_data(self): diff --git a/ldclient/sse_client.py b/ldclient/sse_client.py index fcd255a3..4aeee9f2 100644 --- a/ldclient/sse_client.py +++ b/ldclient/sse_client.py @@ -32,7 +32,7 @@ def __init__(self, url, last_id=None, retry=3000, connect_timeout=10, read_timeo self._chunk_size = chunk_size # Optional support for passing in an HTTP client - self.http = create_http_pool_manager(num_pools=1, verify_ssl=verify_ssl) + self.http = create_http_pool_manager(num_pools=1, verify_ssl=verify_ssl, target_base_uri=url) # Any extra kwargs will be fed into the request call later. 
self.requests_kwargs = kwargs diff --git a/ldclient/streaming.py b/ldclient/streaming.py index 43e815a4..d5a2375b 100644 --- a/ldclient/streaming.py +++ b/ldclient/streaming.py @@ -56,11 +56,14 @@ def run(self): for msg in messages: if not self._running: break + print('*** msg: %s' % msg.event) message_ok = self.process_message(self._store, self._requester, msg) if message_ok is True and self._ready.is_set() is False: + print('*** inited') log.info("StreamingUpdateProcessor initialized ok.") self._ready.set() except UnsuccessfulResponseException as e: + print('*** nope: %s' % e) log.error(http_error_message(e.status, "stream connection")) if not is_http_error_recoverable(e.status): self._ready.set() # if client is initializing, make it stop waiting; has no effect if already inited @@ -154,3 +157,10 @@ def _parse_path(path): if path.startswith(kind.stream_api_path): return ParsedPath(kind = kind, key = path[len(kind.stream_api_path):]) return None + + # magic methods for "with" statement (used in testing) + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + self.stop() diff --git a/ldclient/util.py b/ldclient/util.py index 229030b8..391a5ed6 100644 --- a/ldclient/util.py +++ b/ldclient/util.py @@ -5,6 +5,7 @@ import certifi import logging +from os import environ import six import sys import urllib3 @@ -84,15 +85,41 @@ def status(self): return self._status -def create_http_pool_manager(num_pools=1, verify_ssl=False): +def create_http_pool_manager(num_pools=1, verify_ssl=False, target_base_uri=None): + proxy_url = _get_proxy_url(target_base_uri) + if not verify_ssl: - return urllib3.PoolManager(num_pools=num_pools) - return urllib3.PoolManager( - num_pools=num_pools, - cert_reqs='CERT_REQUIRED', - ca_certs=certifi.where() + if proxy_url is None: + print("no proxy for %s" % target_base_uri) + return urllib3.PoolManager(num_pools=num_pools) + else: + print("the proxy is %s for %s" % (proxy_url, target_base_uri)) + return urllib3.ProxyManager(proxy_url, num_pools=num_pools) + + if proxy_url is None: + print("no proxy for %s" % target_base_uri) + return urllib3.PoolManager( + num_pools=num_pools, + cert_reqs='CERT_REQUIRED', + ca_certs=certifi.where() + ) + else: + print("the proxy is %s for %s" % (proxy_url, target_base_uri)) + return urllib3.ProxyManager( + proxy_url, + num_pools=num_pools, + cert_reqs='CERT_REQUIRED', + ca_certs=certifi.where() ) +def _get_proxy_url(target_base_uri): + if target_base_uri is None: + return None + is_https = target_base_uri.startswith('https:') + if is_https: + return environ.get('https_proxy') + return environ.get('http_proxy') + def throw_if_unsuccessful_response(resp): if resp.status >= 400: diff --git a/testing/http_util.py b/testing/http_util.py index 009458bd..333eeac6 100644 --- a/testing/http_util.py +++ b/testing/http_util.py @@ -55,19 +55,19 @@ def __exit__(self, type, value, traceback): self.close() class MockServerRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler): - # def __init__(self, server_wrapper, request, client_address, server): - # self.server_wrapper = server_wrapper - # BaseHTTPRequestHandler.__init__(self, request, client_address, server) + def do_CONNECT(self): + self._do_request() def do_GET(self): - self._do_request('GET') + self._do_request() def do_POST(self): - self._do_request('POST') + self._do_request() - def _do_request(self, method): + def _do_request(self): + print('*** %s %s' % (self.command, self.path)) server_wrapper = self.server.server_wrapper - 
server_wrapper.requests.put(MockServerRequest(method, self.path, self.headers)) + server_wrapper.requests.put(MockServerRequest(self.command, self.path, self.headers)) if self.path in server_wrapper.matchers: resp = server_wrapper.matchers[self.path] self.send_response(resp.status) diff --git a/testing/test_event_processor.py b/testing/test_event_processor.py index 4d24454b..44ed3609 100644 --- a/testing/test_event_processor.py +++ b/testing/test_event_processor.py @@ -6,6 +6,7 @@ from ldclient.config import Config from ldclient.event_processor import DefaultEventProcessor from ldclient.util import log +from testing.http_util import start_server from testing.stub_util import MockResponse, MockHttp @@ -467,6 +468,43 @@ def start_consuming_events(): assert message1.param == event1 assert had_no_more +def test_can_use_http_proxy_via_environment_var(monkeypatch): + fake_events_uri = 'http://not-real' + + with start_server() as server: + monkeypatch.setenv('http_proxy', server.uri) + config = Config(sdk_key = 'sdk-key', events_uri = fake_events_uri) + server.setup_response(fake_events_uri + '/bulk', 200, None) + + with DefaultEventProcessor(config) as ep: + ep.send_event({ 'kind': 'identify', 'user': user }) + ep.flush() + ep._wait_until_inactive() + + # For an insecure proxy request, our stub server behaves enough like the real thing to satisfy the + # HTTP client, so we should be able to see the request go through. Note that the URI path will + # actually be an absolute URI for a proxy request. + req = server.require_request() + assert req.method == 'POST' + +def test_can_use_https_proxy_via_environment_var(monkeypatch): + fake_events_uri = 'https://not-real' + + with start_server() as server: + monkeypatch.setenv('https_proxy', server.uri) + config = Config(sdk_key = 'sdk-key', events_uri = fake_events_uri) + server.setup_response(fake_events_uri + '/bulk', 200, None) + + with DefaultEventProcessor(config) as ep: + ep.send_event({ 'kind': 'identify', 'user': user }) + ep.flush() + ep._wait_until_inactive() + + # Our simple stub server implementation can't really do HTTPS proxying, so the request will fail, but + # it can still record that it *got* the request, which proves that the request went to the proxy. + req = server.require_request() + assert req.method == 'CONNECT' + def verify_unrecoverable_http_error(status): with DefaultEventProcessor(Config(sdk_key = 'SDK_KEY'), mock_http) as ep: mock_http.set_response_status(status) diff --git a/testing/test_feature_requester.py b/testing/test_feature_requester.py index 569f1ef9..45239567 100644 --- a/testing/test_feature_requester.py +++ b/testing/test_feature_requester.py @@ -125,3 +125,41 @@ def test_get_one_flag_does_not_use_etags(): assert result == flag_data req = server.require_request() assert 'If-None-Match' not in req.headers.keys() # did not send etag from previous request + +def test_can_use_http_proxy_via_environment_var(monkeypatch): + fake_base_uri = 'http://not-real' + with start_server() as server: + monkeypatch.setenv('http_proxy', server.uri) + config = Config(sdk_key = 'sdk-key', base_uri = fake_base_uri) + fr = FeatureRequesterImpl(config) + + resp_data = { 'flags': {}, 'segments': {} } + expected_data = { FEATURES: {}, SEGMENTS: {} } + server.setup_json_response(fake_base_uri + '/sdk/latest-all', resp_data) + + # For an insecure proxy request, our stub server behaves enough like the real thing to satisfy the + # HTTP client, so we should be able to see the request go through. 
Note that the URI path will + # actually be an absolute URI for a proxy request. + result = fr.get_all_data() + assert result == expected_data + req = server.require_request() + assert req.method == 'GET' + +def test_can_use_https_proxy_via_environment_var(monkeypatch): + fake_base_uri = 'https://not-real' + with start_server() as server: + monkeypatch.setenv('https_proxy', server.uri) + config = Config(sdk_key = 'sdk-key', base_uri = fake_base_uri) + fr = FeatureRequesterImpl(config) + + resp_data = { 'flags': {}, 'segments': {} } + server.setup_json_response(fake_base_uri + '/sdk/latest-all', resp_data) + + # Our simple stub server implementation can't really do HTTPS proxying, so the request will fail, but + # it can still record that it *got* the request, which proves that the request went to the proxy. + try: + fr.get_all_data() + except: + pass + req = server.require_request() + assert req.method == 'CONNECT' diff --git a/testing/test_streaming.py b/testing/test_streaming.py new file mode 100644 index 00000000..ba2899c0 --- /dev/null +++ b/testing/test_streaming.py @@ -0,0 +1,81 @@ +from threading import Event + +from ldclient.config import Config +from ldclient.feature_store import InMemoryFeatureStore +from ldclient.streaming import StreamingUpdateProcessor +from ldclient.version import VERSION +from testing.http_util import start_server + + +fake_event = 'event:put\ndata: {"data":{"flags":{},"segments":{}}}\n\n' + +# Note that our simple HTTP stub server implementation does not actually do streaming responses, so +# in these tests the connection will get closed after the response, causing the streaming processor +# to reconnect. For the purposes of the current tests, that's OK because we only care that the initial +# request and response were handled correctly. + +def test_uses_stream_uri(): + store = InMemoryFeatureStore() + ready = Event() + + with start_server() as server: + config = Config(sdk_key = 'sdk-key', stream_uri = server.uri) + server.setup_response('/all', 200, fake_event, { 'Content-Type': 'text/event-stream' }) + + with StreamingUpdateProcessor(config, None, store, ready) as sp: + sp.start() + req = server.await_request() + assert req.method == 'GET' + ready.wait(1) + assert sp.initialized() + +def test_sends_headers(): + store = InMemoryFeatureStore() + ready = Event() + + with start_server() as server: + config = Config(sdk_key = 'sdk-key', stream_uri = server.uri) + server.setup_response('/all', 200, fake_event, { 'Content-Type': 'text/event-stream' }) + + with StreamingUpdateProcessor(config, None, store, ready) as sp: + sp.start() + req = server.await_request() + assert req.headers['Authorization'] == 'sdk-key' + assert req.headers['User-Agent'] == 'PythonClient/' + VERSION + +def test_can_use_http_proxy_via_environment_var(monkeypatch): + store = InMemoryFeatureStore() + ready = Event() + fake_stream_uri = 'http://not-real' + + with start_server() as server: + monkeypatch.setenv('http_proxy', server.uri) + config = Config(sdk_key = 'sdk-key', stream_uri = fake_stream_uri) + server.setup_response(fake_stream_uri + '/all', 200, fake_event, { 'Content-Type': 'text/event-stream' }) + + with StreamingUpdateProcessor(config, None, store, ready) as sp: + sp.start() + # For an insecure proxy request, our stub server behaves enough like the real thing to satisfy the + # HTTP client, so we should be able to see the request go through. Note that the URI path will + # actually be an absolute URI for a proxy request. 
+ req = server.await_request() + assert req.method == 'GET' + ready.wait(1) + assert sp.initialized() + +def test_can_use_https_proxy_via_environment_var(monkeypatch): + store = InMemoryFeatureStore() + ready = Event() + fake_stream_uri = 'https://not-real' + + with start_server() as server: + monkeypatch.setenv('https_proxy', server.uri) + config = Config(sdk_key = 'sdk-key', stream_uri = fake_stream_uri) + server.setup_response(fake_stream_uri + '/all', 200, fake_event, { 'Content-Type': 'text/event-stream' }) + + with StreamingUpdateProcessor(config, None, store, ready) as sp: + sp.start() + # Our simple stub server implementation can't really do HTTPS proxying, so the request will fail, but + # it can still record that it *got* the request, which proves that the request went to the proxy. + req = server.await_request() + assert req.method == 'CONNECT' From ae764b55ae2c2089289a207a2eee4b9d1fb1181f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 28 Oct 2019 17:23:23 -0700 Subject: [PATCH 136/190] rm debugging --- ldclient/streaming.py | 3 --- ldclient/util.py | 4 ---- testing/http_util.py | 1 - 3 files changed, 8 deletions(-) diff --git a/ldclient/streaming.py b/ldclient/streaming.py index d5a2375b..75a56840 100644 --- a/ldclient/streaming.py +++ b/ldclient/streaming.py @@ -56,14 +56,11 @@ def run(self): for msg in messages: if not self._running: break - print('*** msg: %s' % msg.event) message_ok = self.process_message(self._store, self._requester, msg) if message_ok is True and self._ready.is_set() is False: - print('*** inited') log.info("StreamingUpdateProcessor initialized ok.") self._ready.set() except UnsuccessfulResponseException as e: - print('*** nope: %s' % e) log.error(http_error_message(e.status, "stream connection")) if not is_http_error_recoverable(e.status): self._ready.set() # if client is initializing, make it stop waiting; has no effect if already inited diff --git a/ldclient/util.py b/ldclient/util.py index 391a5ed6..98ad4357 100644 --- a/ldclient/util.py +++ b/ldclient/util.py @@ -90,21 +90,17 @@ def create_http_pool_manager(num_pools=1, verify_ssl=False, target_base_uri=None if not verify_ssl: if proxy_url is None: - print("no proxy for %s" % target_base_uri) return urllib3.PoolManager(num_pools=num_pools) else: - print("the proxy is %s for %s" % (proxy_url, target_base_uri)) return urllib3.ProxyManager(proxy_url, num_pools=num_pools) if proxy_url is None: - print("no proxy for %s" % target_base_uri) return urllib3.PoolManager( num_pools=num_pools, cert_reqs='CERT_REQUIRED', ca_certs=certifi.where() ) else: - print("the proxy is %s for %s" % (proxy_url, target_base_uri)) return urllib3.ProxyManager( proxy_url, num_pools=num_pools, diff --git a/testing/http_util.py b/testing/http_util.py index 333eeac6..a232f9e0 100644 --- a/testing/http_util.py +++ b/testing/http_util.py @@ -65,7 +65,6 @@ def do_POST(self): self._do_request() def _do_request(self): - print('*** %s %s' % (self.command, self.path)) server_wrapper = self.server.server_wrapper server_wrapper.requests.put(MockServerRequest(self.command, self.path, self.headers)) if self.path in server_wrapper.matchers: From 28ee4b580436fd4ebccdeded65167809308dccbe Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 29 Oct 2019 16:45:31 -0700 Subject: [PATCH 137/190] fix autodoc options to exclude magic methods --- docs/api-main.rst | 9 +-------- docs/conf.py | 6 ++---- 2 files changed, 3 insertions(+), 12 deletions(-) diff --git a/docs/api-main.rst b/docs/api-main.rst index 56417ea5..1a5af4a1 100644 --- 
a/docs/api-main.rst +++ b/docs/api-main.rst @@ -6,35 +6,28 @@ ldclient module .. automodule:: ldclient :members: get,set_config,set_sdk_key - :show-inheritance: ldclient.client module ---------------------- .. automodule:: ldclient.client :members: LDClient - :special-members: __init__ - :show-inheritance: ldclient.config module ---------------------- .. automodule:: ldclient.config :members: - :special-members: __init__ - :show-inheritance: ldclient.flag module -------------------- .. automodule:: ldclient.flag :members: EvaluationDetail - :special-members: __init__ - :show-inheritance: ldclient.flags_state module --------------------------- .. automodule:: ldclient.flags_state :members: - :show-inheritance: + :exclude-members: __init__, add_flag diff --git a/docs/conf.py b/docs/conf.py index 9e3db965..b93d3c36 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -167,8 +167,6 @@ # -- Extension configuration ------------------------------------------------- autodoc_default_options = { - 'members': None, - 'show-inheritance': None, - 'special-members': None, - 'undoc-members': None + 'special-members': '__init__', + 'undoc-members': False } From 4fc6ce797fc6c975515a85ad1733060a9698e3b7 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 30 Oct 2019 17:12:06 -0700 Subject: [PATCH 138/190] comment --- ldclient/config.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ldclient/config.py b/ldclient/config.py index 23cc77b5..44da64f0 100644 --- a/ldclient/config.py +++ b/ldclient/config.py @@ -99,7 +99,8 @@ def __init__(self, :param http_proxy: Use a proxy when connecting to LaunchDarkly. This is the full URI of the proxy; for example: http://my-proxy.com:1234. Note that unlike the standard `http_proxy` environment variable, this is used regardless of whether the target URI is HTTP or HTTPS (the actual LaunchDarkly - service uses HTTPS, but a Relay Proxy instance could use HTTP). + service uses HTTPS, but a Relay Proxy instance could use HTTP). Setting this Config parameter will + override any proxy specified by an environment variable, but only for LaunchDarkly SDK connections. """ self.__sdk_key = sdk_key From 00432bede3b70d75f2205ce5a4368c390fd9cbed Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 31 Oct 2019 11:23:47 -0700 Subject: [PATCH 139/190] add end-to-end unit tests for proxy config --- testing/test_event_processor.py | 50 ++++++++++++--------- testing/test_feature_requester.py | 72 ++++++++++++++++++------------- testing/test_streaming.py | 66 ++++++++++++++++------------ 3 files changed, 111 insertions(+), 77 deletions(-) diff --git a/testing/test_event_processor.py b/testing/test_event_processor.py index 44ed3609..550c0789 100644 --- a/testing/test_event_processor.py +++ b/testing/test_event_processor.py @@ -469,33 +469,43 @@ def start_consuming_events(): assert had_no_more def test_can_use_http_proxy_via_environment_var(monkeypatch): - fake_events_uri = 'http://not-real' - with start_server() as server: monkeypatch.setenv('http_proxy', server.uri) - config = Config(sdk_key = 'sdk-key', events_uri = fake_events_uri) - server.setup_response(fake_events_uri + '/bulk', 200, None) - - with DefaultEventProcessor(config) as ep: - ep.send_event({ 'kind': 'identify', 'user': user }) - ep.flush() - ep._wait_until_inactive() - - # For an insecure proxy request, our stub server behaves enough like the real thing to satisfy the - # HTTP client, so we should be able to see the request go through. 
Note that the URI path will - # actually be an absolute URI for a proxy request. - req = server.require_request() - assert req.method == 'POST' + config = Config(sdk_key = 'sdk-key', events_uri = 'http://not-real') + _verify_http_proxy_is_used(server, config) def test_can_use_https_proxy_via_environment_var(monkeypatch): - fake_events_uri = 'https://not-real' - with start_server() as server: monkeypatch.setenv('https_proxy', server.uri) - config = Config(sdk_key = 'sdk-key', events_uri = fake_events_uri) - server.setup_response(fake_events_uri + '/bulk', 200, None) + config = Config(sdk_key = 'sdk-key', events_uri = 'https://not-real') + _verify_https_proxy_is_used(server, config) + +def test_can_use_http_proxy_via_config(): + with start_server() as server: + config = Config(sdk_key = 'sdk-key', events_uri = 'http://not-real', http_proxy=server.uri) + _verify_http_proxy_is_used(server, config) + +def test_can_use_https_proxy_via_config(): + with start_server() as server: + config = Config(sdk_key = 'sdk-key', events_uri = 'https://not-real', http_proxy=server.uri) + _verify_https_proxy_is_used(server, config) + +def _verify_http_proxy_is_used(server, config): + server.setup_response(config.events_uri + '/bulk', 200, None) + with DefaultEventProcessor(config) as ep: + ep.send_event({ 'kind': 'identify', 'user': user }) + ep.flush() + ep._wait_until_inactive() + + # For an insecure proxy request, our stub server behaves enough like the real thing to satisfy the + # HTTP client, so we should be able to see the request go through. Note that the URI path will + # actually be an absolute URI for a proxy request. + req = server.require_request() + assert req.method == 'POST' - with DefaultEventProcessor(config) as ep: +def _verify_https_proxy_is_used(server, config): + server.setup_response(config.events_uri + '/bulk', 200, None) + with DefaultEventProcessor(config) as ep: ep.send_event({ 'kind': 'identify', 'user': user }) ep.flush() ep._wait_until_inactive() diff --git a/testing/test_feature_requester.py b/testing/test_feature_requester.py index 45239567..658c8157 100644 --- a/testing/test_feature_requester.py +++ b/testing/test_feature_requester.py @@ -127,39 +127,53 @@ def test_get_one_flag_does_not_use_etags(): assert 'If-None-Match' not in req.headers.keys() # did not send etag from previous request def test_can_use_http_proxy_via_environment_var(monkeypatch): - fake_base_uri = 'http://not-real' with start_server() as server: monkeypatch.setenv('http_proxy', server.uri) - config = Config(sdk_key = 'sdk-key', base_uri = fake_base_uri) - fr = FeatureRequesterImpl(config) - - resp_data = { 'flags': {}, 'segments': {} } - expected_data = { FEATURES: {}, SEGMENTS: {} } - server.setup_json_response(fake_base_uri + '/sdk/latest-all', resp_data) - - # For an insecure proxy request, our stub server behaves enough like the real thing to satisfy the - # HTTP client, so we should be able to see the request go through. Note that the URI path will - # actually be an absolute URI for a proxy request. 
- result = fr.get_all_data() - assert result == expected_data - req = server.require_request() - assert req.method == 'GET' + config = Config(sdk_key = 'sdk-key', base_uri = 'http://not-real') + _verify_http_proxy_is_used(server, config) def test_can_use_https_proxy_via_environment_var(monkeypatch): - fake_base_uri = 'https://not-real' with start_server() as server: monkeypatch.setenv('https_proxy', server.uri) - config = Config(sdk_key = 'sdk-key', base_uri = fake_base_uri) - fr = FeatureRequesterImpl(config) + config = Config(sdk_key = 'sdk-key', base_uri = 'https://not-real') + _verify_https_proxy_is_used(server, config) - resp_data = { 'flags': {}, 'segments': {} } - server.setup_json_response(fake_base_uri + '/sdk/latest-all', resp_data) - - # Our simple stub server implementation can't really do HTTPS proxying, so the request will fail, but - # it can still record that it *got* the request, which proves that the request went to the proxy. - try: - fr.get_all_data() - except: - pass - req = server.require_request() - assert req.method == 'CONNECT' +def test_can_use_http_proxy_via_config(): + with start_server() as server: + config = Config(sdk_key = 'sdk-key', base_uri = 'http://not-real', http_proxy = server.uri) + _verify_http_proxy_is_used(server, config) + +def test_can_use_https_proxy_via_config(): + with start_server() as server: + config = Config(sdk_key = 'sdk-key', base_uri = 'https://not-real', http_proxy = server.uri) + _verify_https_proxy_is_used(server, config) + +def _verify_http_proxy_is_used(server, config): + fr = FeatureRequesterImpl(config) + + resp_data = { 'flags': {}, 'segments': {} } + expected_data = { FEATURES: {}, SEGMENTS: {} } + server.setup_json_response(config.base_uri + '/sdk/latest-all', resp_data) + + # For an insecure proxy request, our stub server behaves enough like the real thing to satisfy the + # HTTP client, so we should be able to see the request go through. Note that the URI path will + # actually be an absolute URI for a proxy request. + result = fr.get_all_data() + assert result == expected_data + req = server.require_request() + assert req.method == 'GET' + +def _verify_https_proxy_is_used(server, config): + fr = FeatureRequesterImpl(config) + + resp_data = { 'flags': {}, 'segments': {} } + server.setup_json_response(config.base_uri + '/sdk/latest-all', resp_data) + + # Our simple stub server implementation can't really do HTTPS proxying, so the request will fail, but + # it can still record that it *got* the request, which proves that the request went to the proxy. 
+ try: + fr.get_all_data() + except: + pass + req = server.require_request() + assert req.method == 'CONNECT' \ No newline at end of file diff --git a/testing/test_streaming.py b/testing/test_streaming.py index ba2899c0..65ba0542 100644 --- a/testing/test_streaming.py +++ b/testing/test_streaming.py @@ -44,38 +44,48 @@ def test_sends_headers(): assert req.headers['User-Agent'] == 'PythonClient/' + VERSION def test_can_use_http_proxy_via_environment_var(monkeypatch): - store = InMemoryFeatureStore() - ready = Event() - fake_stream_uri = 'http://not-real' - with start_server() as server: + config = Config(sdk_key = 'sdk-key', stream_uri = 'http://not-real') monkeypatch.setenv('http_proxy', server.uri) - config = Config(sdk_key = 'sdk-key', stream_uri = fake_stream_uri) - server.setup_response(fake_stream_uri + '/all', 200, fake_event, { 'Content-Type': 'text/event-stream' }) - - with StreamingUpdateProcessor(config, None, store, ready) as sp: - sp.start() - # For an insecure proxy request, our stub server behaves enough like the real thing to satisfy the - # HTTP client, so we should be able to see the request go through. Note that the URI path will - # actually be an absolute URI for a proxy request. - req = server.await_request() - assert req.method == 'GET' - ready.wait(1) - assert sp.initialized() + _verify_http_proxy_is_used(server, config) def test_can_use_https_proxy_via_environment_var(monkeypatch): - store = InMemoryFeatureStore() - ready = Event() - fake_stream_uri = 'https://not-real' - with start_server() as server: + config = Config(sdk_key = 'sdk-key', stream_uri = 'https://not-real') monkeypatch.setenv('https_proxy', server.uri) - config = Config(sdk_key = 'sdk-key', stream_uri = fake_stream_uri) - server.setup_response(fake_stream_uri + '/all', 200, fake_event, { 'Content-Type': 'text/event-stream' }) + _verify_https_proxy_is_used(server, config) - with StreamingUpdateProcessor(config, None, store, ready) as sp: - sp.start() - # Our simple stub server implementation can't really do HTTPS proxying, so the request will fail, but - # it can still record that it *got* the request, which proves that the request went to the proxy. - req = server.await_request() - assert req.method == 'CONNECT' +def test_can_use_http_proxy_via_config(): + with start_server() as server: + config = Config(sdk_key = 'sdk-key', stream_uri = 'http://not-real', http_proxy=server.uri) + _verify_http_proxy_is_used(server, config) + +def test_can_use_https_proxy_via_config(): + with start_server() as server: + config = Config(sdk_key = 'sdk-key', stream_uri = 'https://not-real', http_proxy=server.uri) + _verify_https_proxy_is_used(server, config) + +def _verify_http_proxy_is_used(server, config): + store = InMemoryFeatureStore() + ready = Event() + server.setup_response(config.stream_base_uri + '/all', 200, fake_event, { 'Content-Type': 'text/event-stream' }) + with StreamingUpdateProcessor(config, None, store, ready) as sp: + sp.start() + # For an insecure proxy request, our stub server behaves enough like the real thing to satisfy the + # HTTP client, so we should be able to see the request go through. Note that the URI path will + # actually be an absolute URI for a proxy request. 
+ req = server.await_request() + assert req.method == 'GET' + ready.wait(1) + assert sp.initialized() + +def _verify_https_proxy_is_used(server, config): + store = InMemoryFeatureStore() + ready = Event() + server.setup_response(config.stream_base_uri + '/all', 200, fake_event, { 'Content-Type': 'text/event-stream' }) + with StreamingUpdateProcessor(config, None, store, ready) as sp: + sp.start() + # Our simple stub server implementation can't really do HTTPS proxying, so the request will fail, but + # it can still record that it *got* the request, which proves that the request went to the proxy. + req = server.await_request() + assert req.method == 'CONNECT' From 5911fd9afb63fc7774f65928cdd83524dab59a54 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 31 Oct 2019 12:05:31 -0700 Subject: [PATCH 140/190] indents --- testing/test_event_processor.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/testing/test_event_processor.py b/testing/test_event_processor.py index 550c0789..75093a3d 100644 --- a/testing/test_event_processor.py +++ b/testing/test_event_processor.py @@ -506,14 +506,14 @@ def _verify_http_proxy_is_used(server, config): def _verify_https_proxy_is_used(server, config): server.setup_response(config.events_uri + '/bulk', 200, None) with DefaultEventProcessor(config) as ep: - ep.send_event({ 'kind': 'identify', 'user': user }) - ep.flush() - ep._wait_until_inactive() - - # Our simple stub server implementation can't really do HTTPS proxying, so the request will fail, but - # it can still record that it *got* the request, which proves that the request went to the proxy. - req = server.require_request() - assert req.method == 'CONNECT' + ep.send_event({ 'kind': 'identify', 'user': user }) + ep.flush() + ep._wait_until_inactive() + + # Our simple stub server implementation can't really do HTTPS proxying, so the request will fail, but + # it can still record that it *got* the request, which proves that the request went to the proxy. 
+ req = server.require_request() + assert req.method == 'CONNECT' def verify_unrecoverable_http_error(status): with DefaultEventProcessor(Config(sdk_key = 'SDK_KEY'), mock_http) as ep: From 63125f56d1b21638d80dedbaf016c8579c178428 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 20 Nov 2019 17:11:22 -0800 Subject: [PATCH 141/190] add 3.8 build --- .circleci/config.yml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 46e2166e..6cfbc616 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -9,6 +9,7 @@ workflows: - test-3.5 - test-3.6 - test-3.7 + - test-3.8 test-template: &test-template steps: - checkout @@ -92,3 +93,10 @@ jobs: - image: redis - image: amazon/dynamodb-local - image: consul + test-3.8: + <<: *test-template + docker: + - image: circleci/python:3.8-stretch + - image: redis + - image: amazon/dynamodb-local + - image: consul From 3c68cd20e1cffc41929657b6d7c12237f3bb68ee Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 20 Nov 2019 17:13:12 -0800 Subject: [PATCH 142/190] image name --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 6cfbc616..feb6d110 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -96,7 +96,7 @@ jobs: test-3.8: <<: *test-template docker: - - image: circleci/python:3.8-stretch + - image: circleci/python:3.8-buster - image: redis - image: amazon/dynamodb-local - image: consul From 9b1adf32780d5b2695278ffef975bea481ae7936 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 20 Nov 2019 17:25:56 -0800 Subject: [PATCH 143/190] fail on SyntaxWarning --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index feb6d110..1523d759 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -31,10 +31,10 @@ test-template: &test-template command: | mkdir test-reports; if [[ "$CIRCLE_JOB" == "test-2.7" ]]; then - pytest -s --cov=ldclient --junitxml=test-reports/junit.xml testing; + python -W error:SyntaxWarning -m pytest -s --cov=ldclient --junitxml=test-reports/junit.xml testing; sh -c '[ -n "${CODECLIMATE_REPO_TOKEN+1}" ] && codeclimate-test-reporter || echo "No CODECLIMATE_REPO_TOKEN value is set; not publishing coverage report"'; else - pytest -s --junitxml=test-reports/junit.xml testing; + pytest -W error:SyntaxWarning -m pytest -s --junitxml=test-reports/junit.xml testing; fi - run: name: test packaging/install From 6a954e344d3ceed2bd057175e35f314d1f283792 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 20 Nov 2019 17:29:37 -0800 Subject: [PATCH 144/190] typo --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 1523d759..861f05a8 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -31,10 +31,10 @@ test-template: &test-template command: | mkdir test-reports; if [[ "$CIRCLE_JOB" == "test-2.7" ]]; then - python -W error:SyntaxWarning -m pytest -s --cov=ldclient --junitxml=test-reports/junit.xml testing; + python -W error::SyntaxWarning -m pytest -s --cov=ldclient --junitxml=test-reports/junit.xml testing; sh -c '[ -n "${CODECLIMATE_REPO_TOKEN+1}" ] && codeclimate-test-reporter || echo "No CODECLIMATE_REPO_TOKEN value is set; not publishing coverage report"'; else - pytest -W error:SyntaxWarning -m pytest -s --junitxml=test-reports/junit.xml testing; + pytest -W error::SyntaxWarning -m pytest -s 
--junitxml=test-reports/junit.xml testing; fi - run: name: test packaging/install From d6bf44c6308b9cbe88ac4c8f3bdd74931a9f3f11 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 20 Nov 2019 17:33:54 -0800 Subject: [PATCH 145/190] command syntax --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 861f05a8..2aa451e9 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -31,10 +31,10 @@ test-template: &test-template command: | mkdir test-reports; if [[ "$CIRCLE_JOB" == "test-2.7" ]]; then - python -W error::SyntaxWarning -m pytest -s --cov=ldclient --junitxml=test-reports/junit.xml testing; + pytest -s --cov=ldclient --junitxml=test-reports/junit.xml testing -W error::SyntaxWarning; sh -c '[ -n "${CODECLIMATE_REPO_TOKEN+1}" ] && codeclimate-test-reporter || echo "No CODECLIMATE_REPO_TOKEN value is set; not publishing coverage report"'; else - pytest -W error::SyntaxWarning -m pytest -s --junitxml=test-reports/junit.xml testing; + pytest -s --junitxml=test-reports/junit.xml testing -W error::SyntaxWarning; fi - run: name: test packaging/install From 7b3177fb8961dc61ca4a0336997f7ec1e0eca538 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 20 Nov 2019 18:56:37 -0800 Subject: [PATCH 146/190] pin expiringdict dependency for Python 3.3 compatibility --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 2e3cba6f..f941d6ab 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,6 @@ backoff>=1.4.3 certifi>=2018.4.16 -expiringdict>=1.1.4 +expiringdict>=1.1.4,<1.2.0 six>=1.10.0 pyRFC3339>=1.0 semver>=2.7.9 From 9942d77357bf557430e00875a8c32b7b3be72a4c Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 20 Nov 2019 19:06:15 -0800 Subject: [PATCH 147/190] add Windows CircleCI job --- .circleci/config.yml | 64 ++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 62 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 2aa451e9..68a6122b 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,6 +1,9 @@ -version: 2 +version: 2.1 + +orbs: + win: circleci/windows@1.0.0 + workflows: - version: 2 test: jobs: - test-2.7 @@ -10,6 +13,8 @@ workflows: - test-3.6 - test-3.7 - test-3.8 + - test-windows + test-template: &test-template steps: - checkout @@ -100,3 +105,58 @@ jobs: - image: redis - image: amazon/dynamodb-local - image: consul + + test-windows: + executor: + name: win/vs2019 + shell: powershell.exe + steps: + - checkout + - run: + name: set up DynamoDB + command: | + $ProgressPreference = "SilentlyContinue" # prevents console errors from CircleCI host + iwr -outf dynamo.zip https://s3-us-west-2.amazonaws.com/dynamodb-local/dynamodb_local_latest.zip + mkdir dynamo + Expand-Archive -Path dynamo.zip -DestinationPath dynamo + cd dynamo + javaw -D"java.library.path=./DynamoDBLocal_lib" -jar DynamoDBLocal.jar + background: true + - run: + name: set up Consul + command: | + $ProgressPreference = "SilentlyContinue" + iwr -outf consul.zip https://releases.hashicorp.com/consul/1.4.2/consul_1.4.2_windows_amd64.zip + mkdir consul + Expand-Archive -Path consul.zip -DestinationPath consul + cd consul + sc.exe create "Consul" binPath="$(System.DefaultWorkingDirectory)/consul/consul.exe agent -dev" + sc.exe start "Consul" + - run: + name: start Redis + command: | + $ProgressPreference = "SilentlyContinue" + iwr -outf redis.zip 
https://github.com/MicrosoftArchive/redis/releases/download/win-3.0.504/Redis-x64-3.0.504.zip + mkdir redis + Expand-Archive -Path redis.zip -DestinationPath redis + cd redis + ./redis-server --service-install + ./redis-server --service-start + Start-Sleep -s 5 + ./redis-cli ping + - run: python --version + - run: + name: install requirements + command: | + pip install -r test-requirements.txt + pip install -r consul-requirements.txt + python setup.py install + - run: + name: run tests + command: | + mkdir test-reports + python -m pytest -s --junitxml=test-reports/junit.xml testing; + - store_test_results: + path: test-reports + - store_artifacts: + path: test-reports From 38f3f433036d9d43cb4fab6a28ae7735951c68eb Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 20 Nov 2019 19:09:14 -0800 Subject: [PATCH 148/190] periods are no longer valid in CircleCI job names --- .circleci/config.yml | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 68a6122b..4a46a82a 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -6,13 +6,13 @@ orbs: workflows: test: jobs: - - test-2.7 - - test-3.3 - - test-3.4 - - test-3.5 - - test-3.6 - - test-3.7 - - test-3.8 + - test-2-7 + - test-3-3 + - test-3-4 + - test-3-5 + - test-3-6 + - test-3-7 + - test-3-8 - test-windows test-template: &test-template @@ -56,49 +56,49 @@ test-template: &test-template path: test-reports jobs: - test-2.7: + test-2-7: <<: *test-template docker: - image: circleci/python:2.7-jessie - image: redis - image: amazon/dynamodb-local - image: consul - test-3.3: + test-3-3: <<: *test-template docker: - image: circleci/python:3.3-jessie - image: redis - image: amazon/dynamodb-local # python-consul doesn't support Python 3.3 - test-3.4: + test-3-4: <<: *test-template docker: - image: circleci/python:3.4-jessie - image: redis - image: amazon/dynamodb-local # python-consul doesn't support Python 3.4 - test-3.5: + test-3-5: <<: *test-template docker: - image: circleci/python:3.5-jessie - image: redis - image: amazon/dynamodb-local - image: consul - test-3.6: + test-3-6: <<: *test-template docker: - image: circleci/python:3.6-jessie - image: redis - image: amazon/dynamodb-local - image: consul - test-3.7: + test-3-7: <<: *test-template docker: - image: circleci/python:3.7-stretch - image: redis - image: amazon/dynamodb-local - image: consul - test-3.8: + test-3-8: <<: *test-template docker: - image: circleci/python:3.8-buster From c969db23ed1ddc8f559b2991098ba19119a3c646 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 20 Nov 2019 19:11:13 -0800 Subject: [PATCH 149/190] syntax fix --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 4a46a82a..8861dfa1 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -130,7 +130,7 @@ jobs: mkdir consul Expand-Archive -Path consul.zip -DestinationPath consul cd consul - sc.exe create "Consul" binPath="$(System.DefaultWorkingDirectory)/consul/consul.exe agent -dev" + sc.exe create "Consul" binPath="$(Get-Location)/consul.exe agent -dev" sc.exe start "Consul" - run: name: start Redis From bc31ec9b1bc517b5468e8d531c2c16ece2ea0940 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 20 Nov 2019 19:17:46 -0800 Subject: [PATCH 150/190] install Python in Windows --- .circleci/config.yml | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 
8861dfa1..f0f80d23 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -113,9 +113,15 @@ jobs: steps: - checkout - run: - name: set up DynamoDB + name: install Python 2.7 command: | $ProgressPreference = "SilentlyContinue" # prevents console errors from CircleCI host + iwr -outf python-2.7.16.amd64.msi https://www.python.org/ftp/python/2.7.16/python-2.7.16.amd64.msi + Start-Process msiexec.exe -Wait -ArgumentList '/I python-2.7.16.amd64.msi /quiet' + - run: + name: set up DynamoDB + command: | + $ProgressPreference = "SilentlyContinue" iwr -outf dynamo.zip https://s3-us-west-2.amazonaws.com/dynamodb-local/dynamodb_local_latest.zip mkdir dynamo Expand-Archive -Path dynamo.zip -DestinationPath dynamo From 64486a3ebbcc3e919e90e0f92aa758bc3b64cc59 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 20 Nov 2019 19:26:35 -0800 Subject: [PATCH 151/190] set path --- .circleci/config.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index f0f80d23..fc3eae3e 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -154,6 +154,7 @@ jobs: - run: name: install requirements command: | + $env:Path += ";C:\Python27\;C:\Python27\Scripts\" pip install -r test-requirements.txt pip install -r consul-requirements.txt python setup.py install @@ -161,6 +162,7 @@ jobs: name: run tests command: | mkdir test-reports + $env:Path += ";C:\Python27\;C:\Python27\Scripts\" python -m pytest -s --junitxml=test-reports/junit.xml testing; - store_test_results: path: test-reports From 37509ffb49003698eff367d6b011a6d20a881bf9 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 20 Nov 2019 19:28:50 -0800 Subject: [PATCH 152/190] move command --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index fc3eae3e..b8f88903 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -150,11 +150,11 @@ jobs: ./redis-server --service-start Start-Sleep -s 5 ./redis-cli ping - - run: python --version - run: name: install requirements command: | $env:Path += ";C:\Python27\;C:\Python27\Scripts\" + python --version pip install -r test-requirements.txt pip install -r consul-requirements.txt python setup.py install From 3b41766dc4c4d2aa313567d08265edcf45fccea7 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 20 Nov 2019 19:32:07 -0800 Subject: [PATCH 153/190] turn off debug logging --- testing/test_init.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/testing/test_init.py b/testing/test_init.py index 16d67b6c..2819bbcc 100644 --- a/testing/test_init.py +++ b/testing/test_init.py @@ -3,7 +3,7 @@ import ldclient from ldclient import Config -logging.basicConfig(level=logging.DEBUG) +logging.basicConfig(level=logging.WARN) mylogger = logging.getLogger() From ef680582a4033ce685c6f8cc760d88eb29c09969 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 20 Nov 2019 19:37:11 -0800 Subject: [PATCH 154/190] Py3 in Windows --- .circleci/config.yml | 35 ++++++++++++++++++++++++++--------- 1 file changed, 26 insertions(+), 9 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index b8f88903..788aa99d 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -13,7 +13,12 @@ workflows: - test-3-6 - test-3-7 - test-3-8 - - test-windows + - test-windows: + name: Windows - Py2.7 + py3: false + - test-windows: + name: Windows - Py3 + py3: true test-template: &test-template steps: @@ -112,12 +117,24 @@ jobs: shell: powershell.exe steps: - checkout + - when: + 
condition: << parameters.py3 >> + steps: + - run: + name: install Python 3 + command: choco install python + - unless: + condition: << parameters.py3 >> + steps: + - run: + name: install Python 2.7 + command: | + $ProgressPreference = "SilentlyContinue" # prevents console errors from CircleCI host + iwr -outf python-2.7.16.amd64.msi https://www.python.org/ftp/python/2.7.16/python-2.7.16.amd64.msi + Start-Process msiexec.exe -Wait -ArgumentList '/I python-2.7.16.amd64.msi /quiet' - run: - name: install Python 2.7 - command: | - $ProgressPreference = "SilentlyContinue" # prevents console errors from CircleCI host - iwr -outf python-2.7.16.amd64.msi https://www.python.org/ftp/python/2.7.16/python-2.7.16.amd64.msi - Start-Process msiexec.exe -Wait -ArgumentList '/I python-2.7.16.amd64.msi /quiet' + name: install Python 3 + command: choco install python - run: name: set up DynamoDB command: | From 0c93df7f14b9b782978facaad54d35eb1360db8b Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 20 Nov 2019 19:37:42 -0800 Subject: [PATCH 155/190] config param --- .circleci/config.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 788aa99d..e2a87c38 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -115,6 +115,9 @@ jobs: executor: name: win/vs2019 shell: powershell.exe + parameters: + py3: + type: boolean steps: - checkout - when: From 86d27a87691a28f783be69cf99c2530e61e74d18 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 20 Nov 2019 19:40:16 -0800 Subject: [PATCH 156/190] rm redundant step --- .circleci/config.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index e2a87c38..fe3f9c01 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -135,9 +135,6 @@ jobs: $ProgressPreference = "SilentlyContinue" # prevents console errors from CircleCI host iwr -outf python-2.7.16.amd64.msi https://www.python.org/ftp/python/2.7.16/python-2.7.16.amd64.msi Start-Process msiexec.exe -Wait -ArgumentList '/I python-2.7.16.amd64.msi /quiet' - - run: - name: install Python 3 - command: choco install python - run: name: set up DynamoDB command: | From 001e1968189239577814b92ad8d18276e18dbf26 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 20 Nov 2019 19:42:52 -0800 Subject: [PATCH 157/190] choco switch --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index fe3f9c01..e2c98ce5 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -125,7 +125,7 @@ jobs: steps: - run: name: install Python 3 - command: choco install python + command: choco install python --no-progress - unless: condition: << parameters.py3 >> steps: From 23a42229a4f56552cca7d5a5b2dcaf2f288c4208 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 20 Nov 2019 21:17:37 -0800 Subject: [PATCH 158/190] refactor Linux jobs using CircleCI 
2.1 features --- .circleci/config.yml | 183 +++++++++++++++++++++---------------------- 1 file changed, 89 insertions(+), 94 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index e2c98ce5..2920bc7e 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -6,110 +6,105 @@ orbs: workflows: test: jobs: - - test-2-7 - - test-3-3 - - test-3-4 - - test-3-5 - - test-3-6 - - test-3-7 - - test-3-8 + - test-linux: + name: Python 2.7 + docker-image: circleci/python:2.7-jessie + test-with-codeclimate: true # we only need to run CodeClimate in one job + - test-linux: + name: Python 3.3 + docker-image: circleci/python:3.3-jessie + consul-supported: false # Consul isn't supported in 3.3 + filesource-supported: false # FileDataSource isn't supported in 3.3 + test-packaging: false # packaging test requires virtualenv, which isn't supported in 3.3 + - test-linux: + name: Python 3.4 + docker-image: circleci/python:3.4-jessie + consul-supported: false # Consul isn't supported in 3.4 + - test-linux: + name: Python 3.5 + docker-image: circleci/python:3.5-jessie + - test-linux: + name: Python 3.6 + docker-image: circleci/python:3.6-jessie + - test-linux: + name: Python 3.7 + docker-image: circleci/python:3.7-stretch + - test-linux: + name: Python 3.8 + docker-image: circleci/python:3.8-buster - test-windows: - name: Windows - Py2.7 + name: Windows Py2.7 py3: false - test-windows: - name: Windows - Py3 + name: Windows Py3.3 py3: true -test-template: &test-template - steps: - - checkout - - run: - name: install requirements - command: | - sudo pip install --upgrade pip virtualenv; - sudo pip install -r test-requirements.txt; - if [[ "$CIRCLE_JOB" != "test-3.3" ]]; then - sudo pip install -r test-filesource-optional-requirements.txt; - fi; - if [[ "$CIRCLE_JOB" != "test-3.3" ]] && [[ "$CIRCLE_JOB" != "test-3.4" ]]; then - sudo pip install -r consul-requirements.txt; - fi; - sudo python setup.py install; - pip freeze - - run: - name: run tests - command: | - mkdir test-reports; - if [[ "$CIRCLE_JOB" == "test-2.7" ]]; then - pytest -s --cov=ldclient --junitxml=test-reports/junit.xml testing -W error::SyntaxWarning; - sh -c '[ -n "${CODECLIMATE_REPO_TOKEN+1}" ] && codeclimate-test-reporter || echo "No CODECLIMATE_REPO_TOKEN value is set; not publishing coverage report"'; - else - pytest -s --junitxml=test-reports/junit.xml testing -W error::SyntaxWarning; - fi - - run: - name: test packaging/install - # Note, virtualenv isn't supported on Python 3.3 and this test requires virtualenv. But we - # never build our published package on 3.3 anyway. 
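(A note on the -W error::SyntaxWarning switch that appears in the pytest commands being removed here and reappears in the parameterized jobs below: it uses Python's standard warnings-filter syntax of action::category. The same escalation can be reproduced outside of pytest when debugging such a CI failure locally; a small sketch:)

import warnings

# Equivalent in spirit to pytest's -W error::SyntaxWarning: from here on,
# any SyntaxWarning is raised as an exception instead of being printed.
warnings.filterwarnings('error', category=SyntaxWarning)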
- command: | - if [[ "$CIRCLE_JOB" != "test-3.3" ]]; then - sudo rm -rf dist *.egg-info; - ./test-packaging/test-packaging.sh; - fi - - store_test_results: - path: test-reports - - store_artifacts: - path: test-reports - jobs: - test-2-7: - <<: *test-template - docker: - - image: circleci/python:2.7-jessie - - image: redis - - image: amazon/dynamodb-local - - image: consul - test-3-3: - <<: *test-template - docker: - - image: circleci/python:3.3-jessie - - image: redis - - image: amazon/dynamodb-local - # python-consul doesn't support Python 3.3 - test-3-4: - <<: *test-template - docker: - - image: circleci/python:3.4-jessie - - image: redis - - image: amazon/dynamodb-local - # python-consul doesn't support Python 3.4 - test-3-5: - <<: *test-template - docker: - - image: circleci/python:3.5-jessie - - image: redis - - image: amazon/dynamodb-local - - image: consul - test-3-6: - <<: *test-template - docker: - - image: circleci/python:3.6-jessie - - image: redis - - image: amazon/dynamodb-local - - image: consul - test-3-7: - <<: *test-template - docker: - - image: circleci/python:3.7-stretch - - image: redis - - image: amazon/dynamodb-local - - image: consul - test-3-8: - <<: *test-template + test-linux: + parameters: + docker-image: + type: string + consul-supported: + type: boolean + default: true + filesource-supported: + type: boolean + default: true + test-packaging: + type: boolean + default: true + test-with-codeclimate: + type: boolean + default: false docker: - - image: circleci/python:3.8-buster + - image: << parameters.docker-image >> - image: redis - image: amazon/dynamodb-local - image: consul + steps: + - checkout + - run: + name: install requirements + command: | + sudo pip install --upgrade pip virtualenv; + sudo pip install -r test-requirements.txt; + if [[ "<< parameters.filesource-supported >>" == "true" ]]; then + sudo pip install -r test-filesource-optional-requirements.txt; + fi; + if [[ "<< parameters.consul-supported >>" == "true" ]]; then + sudo pip install -r consul-requirements.txt; + fi; + sudo python setup.py install; + pip freeze + - when: + condition: << parameters.test-with-codeclimate >> + steps: + - run: + name: run tests (with CodeClimate) + command: | + mkdir test-reports + pytest -s --cov=ldclient --junitxml=test-reports/junit.xml testing -W error::SyntaxWarning + sh -c '[ -n "${CODECLIMATE_REPO_TOKEN+1}" ] && codeclimate-test-reporter || echo "No CODECLIMATE_REPO_TOKEN value is set; not publishing coverage report"' + - unless: + condition: << parameters.test-with-codeclimate >> + steps: + - run: + name: run tests + command: | + mkdir test-reports + pytest -s --junitxml=test-reports/junit.xml testing -W error::SyntaxWarning + - when: + condition: << parameters.test-packaging >> + steps: + - run: + name: test packaging/install + command: | + sudo rm -rf dist *.egg-info + ./test-packaging/test-packaging.sh + - store_test_results: + path: test-reports + - store_artifacts: + path: test-reports test-windows: executor: From a5aaa99afb81aa4632bd5691fc23a27451d90341 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 20 Nov 2019 21:21:42 -0800 Subject: [PATCH 159/190] set log level before anything else --- testing/__init__.py | 3 +++ testing/test_init.py | 1 - 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/testing/__init__.py b/testing/__init__.py index d2b1b498..0602017d 100644 --- a/testing/__init__.py +++ b/testing/__init__.py @@ -1,3 +1,6 @@ +import logging import os +logging.basicConfig(level=logging.WARN) + sdk_key = os.environ.get('LD_SDK_KEY') diff --git a/testing/test_init.py b/testing/test_init.py index 2819bbcc..ca13c130 100644 --- a/testing/test_init.py +++ b/testing/test_init.py @@ -3,7 +3,6 @@ import ldclient from ldclient 
import Config -logging.basicConfig(level=logging.WARN) mylogger = logging.getLogger() From 9e403215690498e4068d7bf6ae0bab49b6660cac Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 20 Nov 2019 21:23:22 -0800 Subject: [PATCH 160/190] rm Azure config --- azure-pipelines.yml | 84 --------------------------------------------- 1 file changed, 84 deletions(-) delete mode 100644 azure-pipelines.yml diff --git a/azure-pipelines.yml b/azure-pipelines.yml deleted file mode 100644 index af1f3342..00000000 --- a/azure-pipelines.yml +++ /dev/null @@ -1,84 +0,0 @@ -jobs: - - job: build - pool: - vmImage: 'vs2017-win2016' - steps: - - task: PowerShell@2 - displayName: 'Setup Dynamo' - inputs: - targetType: inline - workingDirectory: $(System.DefaultWorkingDirectory) - script: | - iwr -outf dynamo.zip https://s3-us-west-2.amazonaws.com/dynamodb-local/dynamodb_local_latest.zip - mkdir dynamo - Expand-Archive -Path dynamo.zip -DestinationPath dynamo - cd dynamo - javaw -D"java.library.path=./DynamoDBLocal_lib" -jar DynamoDBLocal.jar - - task: PowerShell@2 - displayName: 'Setup Consul' - inputs: - targetType: inline - workingDirectory: $(System.DefaultWorkingDirectory) - script: | - iwr -outf consul.zip https://releases.hashicorp.com/consul/1.4.2/consul_1.4.2_windows_amd64.zip - mkdir consul - Expand-Archive -Path consul.zip -DestinationPath consul - cd consul - sc.exe create "Consul" binPath="$(System.DefaultWorkingDirectory)/consul/consul.exe agent -dev" - sc.exe start "Consul" - - task: PowerShell@2 - displayName: 'Setup Redis' - inputs: - targetType: inline - workingDirectory: $(System.DefaultWorkingDirectory) - script: | - iwr -outf redis.zip https://github.com/MicrosoftArchive/redis/releases/download/win-3.0.504/Redis-x64-3.0.504.zip - mkdir redis - Expand-Archive -Path redis.zip -DestinationPath redis - cd redis - ./redis-server --service-install - ./redis-server --service-start - - task: UsePythonVersion@0 - inputs: - versionSpec: '2.7' - addToPath: true - - task: PowerShell@2 - displayName: 'Setup SDK and Test 2.7' - inputs: - targetType: inline - workingDirectory: $(System.DefaultWorkingDirectory) - script: | - python --version - pip install -r test-requirements.txt - pip install -r consul-requirements.txt - python setup.py install - mkdir test-reports27 - python -m pytest -s --junitxml=test-reports27/junit.xml testing; - - task: UsePythonVersion@0 - inputs: - versionSpec: '3.7' - addToPath: true - - task: PowerShell@2 - displayName: 'Setup SDK and Test 3.7' - inputs: - targetType: inline - workingDirectory: $(System.DefaultWorkingDirectory) - script: | - python --version - pip install -r test-requirements.txt - pip install -r consul-requirements.txt - python setup.py install - mkdir test-reports37 - python -m pytest -s --junitxml=test-reports37/junit.xml testing; - - task: CopyFiles@2 - inputs: - targetFolder: $(Build.ArtifactStagingDirectory)/test-reports27 - sourceFolder: $(System.DefaultWorkingDirectory)/test-reports27 - - task: CopyFiles@2 - inputs: - targetFolder: $(Build.ArtifactStagingDirectory)/test-reports37 - sourceFolder: $(System.DefaultWorkingDirectory)/test-reports37 - - task: PublishBuildArtifacts@1 - inputs: - pathtoPublish: '$(Build.ArtifactStagingDirectory)' - artifactName: reports From 669e7721a78cf574db7720a8a469fb62dc0e5600 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 9 Dec 2019 10:52:48 -0800 Subject: [PATCH 161/190] use yaml.safe_load() to avoid code execution vulnerability in file data source --- .../integrations/files/file_data_source.py | 2 +- 
testing/test_file_data_source.py | 25 +++++++++++++++++++ 2 files changed, 26 insertions(+), 1 deletion(-) diff --git a/ldclient/impl/integrations/files/file_data_source.py b/ldclient/impl/integrations/files/file_data_source.py index 785a3851..9f9f3eaf 100644 --- a/ldclient/impl/integrations/files/file_data_source.py +++ b/ldclient/impl/integrations/files/file_data_source.py @@ -80,7 +80,7 @@ def _load_file(self, path, all_data): def _parse_content(self, content): if have_yaml: - return yaml.load(content) # pyyaml correctly parses JSON too + return yaml.safe_load(content) # pyyaml correctly parses JSON too return json.loads(content) def _add_item(self, all_data, kind, item): diff --git a/testing/test_file_data_source.py b/testing/test_file_data_source.py index 78ab5359..7b13cf9b 100644 --- a/testing/test_file_data_source.py +++ b/testing/test_file_data_source.py @@ -246,3 +246,28 @@ def test_evaluates_simplified_flag_with_client_as_expected(): os.remove(path) if client is not None: client.close() + +unsafe_yaml_caused_method_to_be_called = False + +def arbitrary_method_called_from_yaml(x): + global unsafe_yaml_caused_method_to_be_called + unsafe_yaml_caused_method_to_be_called = True + +def test_does_not_allow_unsafe_yaml(): + if not have_yaml: + pytest.skip("skipping file source test with YAML because pyyaml isn't available") + + # This extended syntax defined by pyyaml allows arbitrary code execution. We should be using + # yaml.safe_load() which does not support such things. + unsafe_yaml = ''' +!!python/object/apply:testing.test_file_data_source.arbitrary_method_called_from_yaml ["hi"] +''' + path = make_temp_file(unsafe_yaml) + try: + factory = Files.new_data_source(paths = path) + client = LDClient(config=Config(update_processor_class = factory, send_events = False)) + finally: + os.remove(path) + if client is not None: + client.close() + assert unsafe_yaml_caused_method_to_be_called == False From 52238d1d44ebc661bd037004398718c4a8929780 Mon Sep 17 00:00:00 2001 From: Gavin Whelan Date: Mon, 16 Dec 2019 23:17:44 +0000 Subject: [PATCH 162/190] Initial work on wrapper_name, wrapper_version, diagnostic config options and start of diagnostic config event creation. --- ldclient/config.py | 41 +++++++++++++++++++++-- ldclient/diagnostics.py | 29 +++++++++++++++++ ldclient/event_processor.py | 2 +- ldclient/feature_requester.py | 2 +- ldclient/streaming.py | 2 +- ldclient/util.py | 32 +++++++++++------- testing/test_event_processor.py | 24 ++++++++++++++ testing/test_feature_requester.py | 54 ++++++++++++++++++++++++++++++- testing/test_streaming.py | 29 +++++++++++++++++ 9 files changed, 197 insertions(+), 18 deletions(-) create mode 100644 ldclient/diagnostics.py diff --git a/ldclient/config.py b/ldclient/config.py index 4ea3d6bc..517b9e18 100644 --- a/ldclient/config.py +++ b/ldclient/config.py @@ -44,7 +44,11 @@ def __init__(self, user_keys_capacity=1000, user_keys_flush_interval=300, inline_users_in_events=False, - http_proxy=None): + http_proxy=None, + diagnostic_opt_out=False, + diagnostic_recording_interval=900, + wrapper_name=None, + wrapper_version=None): """ :param string sdk_key: The SDK key for your LaunchDarkly account. :param string base_uri: The base URL for the LaunchDarkly server. Most users should use the default @@ -101,6 +105,15 @@ def __init__(self, variable, this is used regardless of whether the target URI is HTTP or HTTPS (the actual LaunchDarkly service uses HTTPS, but a Relay Proxy instance could use HTTP). 
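(Stepping back to the yaml.safe_load() change in patch 161 above: pyyaml's unrestricted loader honors language-specific tags such as !!python/object/apply, which can invoke arbitrary callables during parsing, while safe_load() rejects them outright. A self-contained demonstration follows; it requires pyyaml, and os.getcwd is used only as a harmless stand-in for an attacker-chosen callable.)

import yaml

malicious = '!!python/object/apply:os.getcwd []'
try:
    yaml.safe_load(malicious)
except yaml.YAMLError as err:
    # safe_load refuses python-specific tags rather than executing them
    print('rejected:', err)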
Setting this Config parameter will override any proxy specified by an environment variable, but only for LaunchDarkly SDK connections. + :param bool diagnostic_opt_out: TODO TODO TODO + :param int diagnostic_recording_interval: TODO TODO TODO + :param string wrapper_name: For use by wrapper libraries to set an identifying name for the wrapper + being used. This will be sent in HTTP headers during requests to the LaunchDarkly servers to allow + recording metrics on the usage of these wrapper libraries. + :param string wrapper_version: For use by wrapper libraries to report the version of the library in + use. If `wrapper_name` is not set, this field will be ignored. Otherwise the version string will + be included in the HTTP headers along with the `wrapper_name` during requests to the LaunchDarkly + servers. """ self.__sdk_key = sdk_key @@ -133,6 +146,10 @@ def __init__(self, self.__user_keys_flush_interval = user_keys_flush_interval self.__inline_users_in_events = inline_users_in_events self.__http_proxy = http_proxy + self.__diagnostic_opt_out = diagnostic_opt_out + self.__diagnostic_recording_interval = diagnostic_recording_interval + self.__wrapper_name = wrapper_name + self.__wrapper_version = wrapper_version @classmethod def default(cls): @@ -171,7 +188,11 @@ def copy_with_new_sdk_key(self, new_sdk_key): offline=self.__offline, user_keys_capacity=self.__user_keys_capacity, user_keys_flush_interval=self.__user_keys_flush_interval, - inline_users_in_events=self.__inline_users_in_events) + inline_users_in_events=self.__inline_users_in_events, + diagnostic_opt_out=self.__diagnostic_opt_out, + diagnostic_recording_interval=self.__diagnostic_recording_interval, + wrapper_name=self.__wrapper_name, + wrapper_version=self.__wrapper_version) # for internal use only - probably should be part of the client logic def get_default(self, key, default): @@ -289,6 +310,22 @@ def inline_users_in_events(self): def http_proxy(self): return self.__http_proxy + @property + def diagnostic_opt_out(self): + return self.__diagnostic_opt_out + + @property + def diagnostic_recording_interval(self): + return self.__diagnostic_recording_interval + + @property + def wrapper_name(self): + return self.__wrapper_name + + @property + def wrapper_version(self): + return self.__wrapper_version + def _validate(self): if self.offline is False and self.sdk_key is None or self.sdk_key == '': log.warning("Missing or blank sdk_key.") diff --git a/ldclient/diagnostics.py b/ldclient/diagnostics.py new file mode 100644 index 00000000..29c6dcb8 --- /dev/null +++ b/ldclient/diagnostics.py @@ -0,0 +1,29 @@ +DEFAULT_CONFIG = Config('sdk_key') +DEFAULT_BASE_URI = DEFAULT_CONFIG.base_uri +DEFAULT_EVENTS_URI = DEFAULT_CONFIG.events_uri +DEFAULT_STREAM_BASE_URI = DEFAULT_CONFIG.stream_base_uri + +def create_diagnostic_config_object(config): + return {'customBaseURI': False if config.base_uri == DEFAULT_BASE_URI else True, + 'customEventsURI': False if config.events_uri == DEFAULT_EVENTS_URI else True, + 'customStreamURI': False if config.stream_base_uri == DEFAULT_STREAM_BASE_URI else True, + 'eventsCapacity': config.events_max_pending, + 'connectTimeoutMillis': config.connect_timeout * 1000, + 'socketTimeoutMillis': config.read_timeout * 1000, + 'eventsFlushIntervalMillis': config.flush_interval * 1000, + 'usingProxy': False, #TODO + 'usingProxyAuthenticator': False, #TODO + 'streamingDisabled': not config.stream, + 'usingRelayDaemon': False, #TODO + 'offline': config.offline, #Check if this actually makes sense + 'allAttributesPrivate': 
config.all_attributes_private, + 'pollingIntervalMillis': config.poll_interval * 1000, + #'startWaitMillis': check, + #'samplingInterval': check, + #'reconnectTimeMillis': check, + 'userKeysCapacity': config.user_keys_capacity, + 'userKeysFlushIntervalMillis': config.user_keys_flush_interval * 1000, + 'inlineUsersInEvents': config.inline_users_in_events, + 'diagnosticRecordingIntervalMillis': config.diagnostic_recording_interval * 1000, + #'featureStoreFactory': check, + } diff --git a/ldclient/event_processor.py b/ldclient/event_processor.py index 93680c13..29d25979 100644 --- a/ldclient/event_processor.py +++ b/ldclient/event_processor.py @@ -162,7 +162,7 @@ def _do_send(self, output_events): try: json_body = json.dumps(output_events) log.debug('Sending events payload: ' + json_body) - hdrs = _headers(self._config.sdk_key) + hdrs = _headers(self._config) hdrs['X-LaunchDarkly-Event-Schema'] = str(__CURRENT_EVENT_SCHEMA__) uri = self._config.events_uri r = self._http.request('POST', uri, diff --git a/ldclient/feature_requester.py b/ldclient/feature_requester.py index 6af810a5..983798ff 100644 --- a/ldclient/feature_requester.py +++ b/ldclient/feature_requester.py @@ -40,7 +40,7 @@ def get_one(self, kind, key): return self._do_request(self._config.base_uri + kind.request_api_path + '/' + key, False) def _do_request(self, uri, allow_cache): - hdrs = _headers(self._config.sdk_key) + hdrs = _headers(self._config) if allow_cache: cache_entry = self._cache.get(uri) if cache_entry is not None: diff --git a/ldclient/streaming.py b/ldclient/streaming.py index b3638621..391e2f52 100644 --- a/ldclient/streaming.py +++ b/ldclient/streaming.py @@ -86,7 +86,7 @@ def log_backoff_message(props): def _connect(self): return SSEClient( self._uri, - headers=_stream_headers(self._config.sdk_key), + headers=_stream_headers(self._config), connect_timeout=self._config.connect_timeout, read_timeout=stream_read_timeout, verify_ssl=self._config.verify_ssl, diff --git a/ldclient/util.py b/ldclient/util.py index 1d059798..23dff4fb 100644 --- a/ldclient/util.py +++ b/ldclient/util.py @@ -37,18 +37,26 @@ # noinspection PyUnresolvedReferences __BASE_TYPES__ = (str, float, int, bool, unicode) - -def _headers(sdk_key): - return {'Authorization': sdk_key, 'User-Agent': 'PythonClient/' + VERSION, - 'Content-Type': "application/json"} - - -def _stream_headers(sdk_key, client="PythonClient"): - return {'Authorization': sdk_key, - 'User-Agent': '{0}/{1}'.format(client, VERSION), - 'Cache-Control': 'no-cache', - 'Accept': "text/event-stream"} - +def _base_headers(config): + headers = {'Authorization': config.sdk_key, + 'User-Agent': 'PythonClient/' + VERSION} + if isinstance(config.wrapper_name, str) and config.wrapper_name != "": + wrapper_version = "" + if isinstance(config.wrapper_version, str) and config.wrapper_version != "": + wrapper_version = "/" + config.wrapper_version + headers.update({'X-LaunchDarkly-Wrapper': config.wrapper_name + wrapper_version}) + return headers + +def _headers(config): + base_headers = _base_headers(config) + base_headers.update({'Content-Type': "application/json"}) + return base_headers + +def _stream_headers(config): + base_headers = _base_headers(config) + base_headers.update({ 'Cache-Control': "no-cache" + , 'Accept': "text/event-stream" }) + return base_headers def check_uwsgi(): if 'uwsgi' in sys.modules: diff --git a/testing/test_event_processor.py b/testing/test_event_processor.py index 9ef1b4f8..61033bec 100644 --- a/testing/test_event_processor.py +++ b/testing/test_event_processor.py 
@@ -424,6 +424,30 @@ def test_sdk_key_is_sent(): assert mock_http.request_headers.get('Authorization') == 'SDK_KEY' +def test_wrapper_header_not_sent_when_not_set(): + with DefaultEventProcessor(Config(), mock_http) as ep: + ep.send_event({ 'kind': 'identify', 'user': user }) + ep.flush() + ep._wait_until_inactive() + + assert mock_http.request_headers.get('X-LaunchDarkly-Wrapper') is None + +def test_wrapper_header_sent_when_set(): + with DefaultEventProcessor(Config(wrapper_name = "Flask", wrapper_version = "0.0.1"), mock_http) as ep: + ep.send_event({ 'kind': 'identify', 'user': user }) + ep.flush() + ep._wait_until_inactive() + + assert mock_http.request_headers.get('X-LaunchDarkly-Wrapper') == "Flask/0.0.1" + +def test_wrapper_header_sent_without_version(): + with DefaultEventProcessor(Config(wrapper_name = "Flask"), mock_http) as ep: + ep.send_event({ 'kind': 'identify', 'user': user }) + ep.flush() + ep._wait_until_inactive() + + assert mock_http.request_headers.get('X-LaunchDarkly-Wrapper') == "Flask" + def test_no_more_payloads_are_sent_after_401_error(): verify_unrecoverable_http_error(401) diff --git a/testing/test_feature_requester.py b/testing/test_feature_requester.py index 658c8157..f4837d7e 100644 --- a/testing/test_feature_requester.py +++ b/testing/test_feature_requester.py @@ -34,6 +34,33 @@ def test_get_all_data_sends_headers(): req = server.require_request() assert req.headers['Authorization'] == 'sdk-key' assert req.headers['User-Agent'] == 'PythonClient/' + VERSION + assert req.headers['X-LaunchDarkly-Wrapper'] is None + +def test_get_all_data_sends_wrapper_header(): + with start_server() as server: + config = Config(sdk_key = 'sdk-key', base_uri = server.uri, + wrapper_name = 'Flask', wrapper_version = '0.1.0') + fr = FeatureRequesterImpl(config) + + resp_data = { 'flags': {}, 'segments': {} } + server.setup_json_response('/sdk/latest-all', resp_data) + + fr.get_all_data() + req = server.require_request() + assert req.headers['X-LaunchDarkly-Wrapper'] == 'Flask/0.1.0' + +def test_get_all_data_sends_wrapper_header_without_version(): + with start_server() as server: + config = Config(sdk_key = 'sdk-key', base_uri = server.uri, + wrapper_name = 'Flask') + fr = FeatureRequesterImpl(config) + + resp_data = { 'flags': {}, 'segments': {} } + server.setup_json_response('/sdk/latest-all', resp_data) + + fr.get_all_data() + req = server.require_request() + assert req.headers['X-LaunchDarkly-Wrapper'] == 'Flask' def test_get_all_data_can_use_cached_data(): with start_server() as server: @@ -96,6 +123,31 @@ def test_get_one_flag_sends_headers(): req = server.require_request() assert req.headers['Authorization'] == 'sdk-key' assert req.headers['User-Agent'] == 'PythonClient/' + VERSION + assert req.headers['X-LaunchDarkly-Wrapper'] is None + +def test_get_one_flag_sends_wrapper_header(): + with start_server() as server: + config = Config(sdk_key = 'sdk-key', base_uri = server.uri, + wrapper_name = 'Flask', wrapper_version = '0.1.0') + fr = FeatureRequesterImpl(config) + key = 'flag1' + flag_data = { 'key': key } + server.setup_json_response('/sdk/latest-flags/' + key, flag_data) + fr.get_one(FEATURES, key) + req = server.require_request() + assert req.headers['X-LaunchDarkly-Wrapper'] == 'Flask/0.1.0' + +def test_get_one_flag_sends_wrapper_header_without_version(): + with start_server() as server: + config = Config(sdk_key = 'sdk-key', base_uri = server.uri, + wrapper_name = 'Flask') + fr = FeatureRequesterImpl(config) + key = 'flag1' + flag_data = { 'key': key } + 
server.setup_json_response('/sdk/latest-flags/' + key, flag_data) + fr.get_one(FEATURES, key) + req = server.require_request() + assert req.headers['X-LaunchDarkly-Wrapper'] == 'Flask' def test_get_one_flag_throws_on_error(): with start_server() as server: @@ -176,4 +228,4 @@ def _verify_https_proxy_is_used(server, config): except: pass req = server.require_request() - assert req.method == 'CONNECT' \ No newline at end of file + assert req.method == 'CONNECT' diff --git a/testing/test_streaming.py b/testing/test_streaming.py index 65ba0542..e784aa6c 100644 --- a/testing/test_streaming.py +++ b/testing/test_streaming.py @@ -42,6 +42,35 @@ def test_sends_headers(): req = server.await_request() assert req.headers['Authorization'] == 'sdk-key' assert req.headers['User-Agent'] == 'PythonClient/' + VERSION + assert req.headers['X-LaunchDarkly-Wrapper'] is None + +def test_sends_wrapper_header(): + store = InMemoryFeatureStore() + ready = Event() + + with start_server() as server: + config = Config(sdk_key = 'sdk-key', stream_uri = server.uri, + wrapper_name = 'Flask', wrapper_version = '0.1.0') + server.setup_response('/all', 200, fake_event, { 'Content-Type': 'text/event-stream' }) + + with StreamingUpdateProcessor(config, None, store, ready) as sp: + sp.start() + req = server.await_request() + assert req.headers['X-LaunchDarkly-Wrapper'] == 'Flask/0.1.0' + +def test_sends_wrapper_header_without_version(): + store = InMemoryFeatureStore() + ready = Event() + + with start_server() as server: + config = Config(sdk_key = 'sdk-key', stream_uri = server.uri, + wrapper_name = 'Flask') + server.setup_response('/all', 200, fake_event, { 'Content-Type': 'text/event-stream' }) + + with StreamingUpdateProcessor(config, None, store, ready) as sp: + sp.start() + req = server.await_request() + assert req.headers['X-LaunchDarkly-Wrapper'] == 'Flask' def test_can_use_http_proxy_via_environment_var(monkeypatch): with start_server() as server: From 38d08bdd935d2ce37a5c28952e428efacc053eff Mon Sep 17 00:00:00 2001 From: Gavin Whelan Date: Tue, 17 Dec 2019 00:07:46 +0000 Subject: [PATCH 163/190] Python 2 compat changes. 
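(The compatibility fix below comes down to mapping-access semantics: under Python 2 the stub server's header object raises KeyError when an absent header is indexed, whereas .get() returns None on every version, which is what these assertions mean to express. Illustrated with a plain dict standing in for the recorded headers:)

headers = {'Authorization': 'sdk-key'}  # stand-in for a recorded request's headers

assert headers.get('X-LaunchDarkly-Wrapper') is None  # portable missing-header check
try:
    headers['X-LaunchDarkly-Wrapper']
except KeyError:
    pass  # plain indexing raises on a missing key, which is what broke under Python 2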
--- testing/test_feature_requester.py | 12 ++++++------ testing/test_streaming.py | 10 +++++----- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/testing/test_feature_requester.py b/testing/test_feature_requester.py index f4837d7e..da72442c 100644 --- a/testing/test_feature_requester.py +++ b/testing/test_feature_requester.py @@ -34,7 +34,7 @@ def test_get_all_data_sends_headers(): req = server.require_request() assert req.headers['Authorization'] == 'sdk-key' assert req.headers['User-Agent'] == 'PythonClient/' + VERSION - assert req.headers['X-LaunchDarkly-Wrapper'] is None + assert req.headers.get('X-LaunchDarkly-Wrapper') is None def test_get_all_data_sends_wrapper_header(): with start_server() as server: @@ -47,7 +47,7 @@ def test_get_all_data_sends_wrapper_header(): fr.get_all_data() req = server.require_request() - assert req.headers['X-LaunchDarkly-Wrapper'] == 'Flask/0.1.0' + assert req.headers.get('X-LaunchDarkly-Wrapper') == 'Flask/0.1.0' def test_get_all_data_sends_wrapper_header_without_version(): with start_server() as server: @@ -60,7 +60,7 @@ def test_get_all_data_sends_wrapper_header_without_version(): fr.get_all_data() req = server.require_request() - assert req.headers['X-LaunchDarkly-Wrapper'] == 'Flask' + assert req.headers.get('X-LaunchDarkly-Wrapper') == 'Flask' def test_get_all_data_can_use_cached_data(): with start_server() as server: @@ -123,7 +123,7 @@ def test_get_one_flag_sends_headers(): req = server.require_request() assert req.headers['Authorization'] == 'sdk-key' assert req.headers['User-Agent'] == 'PythonClient/' + VERSION - assert req.headers['X-LaunchDarkly-Wrapper'] is None + assert req.headers.get('X-LaunchDarkly-Wrapper') is None def test_get_one_flag_sends_wrapper_header(): with start_server() as server: @@ -135,7 +135,7 @@ def test_get_one_flag_sends_wrapper_header(): server.setup_json_response('/sdk/latest-flags/' + key, flag_data) fr.get_one(FEATURES, key) req = server.require_request() - assert req.headers['X-LaunchDarkly-Wrapper'] == 'Flask/0.1.0' + assert req.headers.get('X-LaunchDarkly-Wrapper') == 'Flask/0.1.0' def test_get_one_flag_sends_wrapper_header_without_version(): with start_server() as server: @@ -147,7 +147,7 @@ def test_get_one_flag_sends_wrapper_header_without_version(): server.setup_json_response('/sdk/latest-flags/' + key, flag_data) fr.get_one(FEATURES, key) req = server.require_request() - assert req.headers['X-LaunchDarkly-Wrapper'] == 'Flask' + assert req.headers.get('X-LaunchDarkly-Wrapper') == 'Flask' def test_get_one_flag_throws_on_error(): with start_server() as server: diff --git a/testing/test_streaming.py b/testing/test_streaming.py index e784aa6c..37cf0148 100644 --- a/testing/test_streaming.py +++ b/testing/test_streaming.py @@ -40,9 +40,9 @@ def test_sends_headers(): with StreamingUpdateProcessor(config, None, store, ready) as sp: sp.start() req = server.await_request() - assert req.headers['Authorization'] == 'sdk-key' - assert req.headers['User-Agent'] == 'PythonClient/' + VERSION - assert req.headers['X-LaunchDarkly-Wrapper'] is None + assert req.headers.get('Authorization') == 'sdk-key' + assert req.headers.get('User-Agent') == 'PythonClient/' + VERSION + assert req.headers.get('X-LaunchDarkly-Wrapper') is None def test_sends_wrapper_header(): store = InMemoryFeatureStore() @@ -56,7 +56,7 @@ def test_sends_wrapper_header(): with StreamingUpdateProcessor(config, None, store, ready) as sp: sp.start() req = server.await_request() - assert req.headers['X-LaunchDarkly-Wrapper'] == 'Flask/0.1.0' + 
assert req.headers.get('X-LaunchDarkly-Wrapper') == 'Flask/0.1.0' def test_sends_wrapper_header_without_version(): store = InMemoryFeatureStore() @@ -70,7 +70,7 @@ def test_sends_wrapper_header_without_version(): with StreamingUpdateProcessor(config, None, store, ready) as sp: sp.start() req = server.await_request() - assert req.headers['X-LaunchDarkly-Wrapper'] == 'Flask' + assert req.headers.get('X-LaunchDarkly-Wrapper') == 'Flask' def test_can_use_http_proxy_via_environment_var(monkeypatch): with start_server() as server: From e3eb3ee38b33b4b80edfba4c29fe6acc4f78d5f6 Mon Sep 17 00:00:00 2001 From: Gavin Whelan Date: Tue, 17 Dec 2019 21:29:06 +0000 Subject: [PATCH 164/190] More event generation code and starting to integrate tracking diagnostic values. --- ldclient/diagnostics.py | 32 +++++++++++++++++++++++++++++--- ldclient/event_processor.py | 31 +++++++++++++++++++++++++++---- 2 files changed, 56 insertions(+), 7 deletions(-) diff --git a/ldclient/diagnostics.py b/ldclient/diagnostics.py index 29c6dcb8..d6109afb 100644 --- a/ldclient/diagnostics.py +++ b/ldclient/diagnostics.py @@ -3,10 +3,23 @@ DEFAULT_EVENTS_URI = DEFAULT_CONFIG.events_uri DEFAULT_STREAM_BASE_URI = DEFAULT_CONFIG.stream_base_uri +def diagnostic_base_fields(kind, creation_date, diagnostic_id): + return {'kind': kind, + 'creationDate': creation_date, + 'id': diagnostic_id} + +def create_diagnostic_statistics(creation_date, diagnostic_id, data_since_date, dropped_events, deduplicated_users, events_in_last_batch): + base_object = diagnostic_base_fields('diagnostic', creation_date, diagnostic_id) + base_object.update({'dataSinceDate': data_since_date, + 'droppedEvents': dropped_events, + 'deduplicatedUsers': deduplicated_users, + 'eventsInLastBatch': events_in_last_batch}) + return base_object + def create_diagnostic_config_object(config): - return {'customBaseURI': False if config.base_uri == DEFAULT_BASE_URI else True, - 'customEventsURI': False if config.events_uri == DEFAULT_EVENTS_URI else True, - 'customStreamURI': False if config.stream_base_uri == DEFAULT_STREAM_BASE_URI else True, + return {'customBaseURI': config.base_uri != DEFAULT_BASE_URI, + 'customEventsURI': config.events_uri != DEFAULT_EVENTS_URI, + 'customStreamURI': config.stream_base_uri != DEFAULT_STREAM_BASE_URI, 'eventsCapacity': config.events_max_pending, 'connectTimeoutMillis': config.connect_timeout * 1000, 'socketTimeoutMillis': config.read_timeout * 1000, @@ -27,3 +40,16 @@ def create_diagnostic_config_object(config): 'diagnosticRecordingIntervalMillis': config.diagnostic_recording_interval * 1000, #'featureStoreFactory': check, } + +def create_diagnostic_sdk_object(): + return {} + +def create_diagnostic_platform_object(): + return {} + +def create_diagnostic_init(creation_date, diagnostic_id, config): + base_object = diagnostic_base_fields('diagnostic-init', creation_date, diagnostic_id) + base_object.update({'configuration': create_diagnostic_config_object(config), + 'sdk': create_diagnostic_sdk_object(), + 'platform': create_diagnostic_platform_object()}) + return base_object diff --git a/ldclient/event_processor.py b/ldclient/event_processor.py index 29d25979..c66d6aac 100644 --- a/ldclient/event_processor.py +++ b/ldclient/event_processor.py @@ -186,9 +186,11 @@ def __init__(self, capacity): self._events = [] self._summarizer = EventSummarizer() self._exceeded_capacity = False + self._dropped_events = 0 def add_event(self, event): if len(self._events) >= self._capacity: + self._dropped_events = self._dropped_events + 1 if not 
self._exceeded_capacity: log.warning("Exceeded event queue capacity. Increase capacity to avoid dropping events.") self._exceeded_capacity = True @@ -198,7 +200,12 @@ def add_event(self, event): def add_to_summary(self, event): self._summarizer.summarize_event(event) - + + def get_and_clear_dropped_count(self): + ret = self._dropped_events + self._dropped_events = 0 + return ret + def get_payload(self): return FlushPayload(self._events, self._summarizer.snapshot()) @@ -219,6 +226,7 @@ def __init__(self, inbox, config, http_client): self._user_keys = SimpleLRUCache(config.user_keys_capacity) self._formatter = EventOutputFormatter(config) self._last_known_past_time = 0 + self._deduplicated_users = 0 self._flush_workers = FixedThreadPool(__MAX_FLUSH_THREADS__, "ldclient.flush") @@ -237,6 +245,8 @@ def _run_main_loop(self): self._trigger_flush() elif message.type == 'flush_users': self._user_keys.clear() + elif message.type == 'diagnostic': + self._send_and_reset_diagnostics() elif message.type == 'test_sync': self._flush_workers.wait() message.param.set() @@ -269,9 +279,12 @@ def _process_event(self, event): # an identify event for that user. if not (add_full_event and self._config.inline_users_in_events): user = event.get('user') - if user and not self.notice_user(user): - if event['kind'] != 'identify': - add_index_event = True + if user and 'key' in user: + is_index_event = event['kind'] == 'identify' + already_seen = self.notice_user(user) + add_index_event = not is_index_event and not already_seen + if not is_index_event and already_seen: + self._deduplicated_users = self._deduplicated_users + 1 if add_index_event: ie = { 'kind': 'index', 'creationDate': event['creationDate'], 'user': user } @@ -326,6 +339,10 @@ def _handle_response(self, r): self._disabled = True return + def _send_and_reset_diagnostics(self): + dropped_event_count = self._outbox.get_and_clear_dropped_count() + return + def _do_shutdown(self): self._flush_workers.stop() self._flush_workers.wait() @@ -341,6 +358,9 @@ def __init__(self, config, http=None, dispatcher_class=None): self._users_flush_timer = RepeatingTimer(config.user_keys_flush_interval, self._flush_users) self._flush_timer.start() self._users_flush_timer.start() + if not config.diagnostic_opt_out: + self._diagnostic_event_timer = RepeatingTimer(config.diagnostic_recording_interval, self._send_diagnostic) + self._diagnostic_event_timer.start() self._close_lock = Lock() self._closed = False (dispatcher_class or EventDispatcher)(self._inbox, config, http) @@ -376,6 +396,9 @@ def _post_to_inbox(self, message): def _flush_users(self): self._inbox.put(EventProcessorMessage('flush_users', None)) + def _send_diagnostic(self): + self._inbox.put(EventProcessorMessage('diagnostic', None)) + # Used only in tests def _wait_until_inactive(self): self._post_message_and_wait('test_sync') From 2d801980829d309344a8766d4932161d41bddc9e Mon Sep 17 00:00:00 2001 From: Gavin Whelan Date: Fri, 20 Dec 2019 19:26:12 +0000 Subject: [PATCH 165/190] Add minimum diagnostic recording interval. Fix diagnostic.py to be importable. Add more diagnostic event fields. 
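(One detail worth calling out from patch 164 above: get_and_clear_dropped_count() reads and resets the counter in a single step, so each periodic diagnostic event reports only the drops that occurred since the previous event. A minimal standalone version of the pattern, with no locking since only the dispatcher thread touches the outbox:)

class DropCounter:
    def __init__(self):
        self._dropped = 0

    def increment(self):
        self._dropped += 1

    def get_and_clear(self):
        # read-and-reset: each snapshot covers only the latest interval
        count = self._dropped
        self._dropped = 0
        return count

counter = DropCounter()
counter.increment()
assert counter.get_and_clear() == 1
assert counter.get_and_clear() == 0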
--- ldclient/config.py | 2 +- ldclient/diagnostics.py | 31 ++++++++++++++++--------------- ldclient/event_processor.py | 3 ++- 3 files changed, 19 insertions(+), 17 deletions(-) diff --git a/ldclient/config.py b/ldclient/config.py index 517b9e18..c040e9c4 100644 --- a/ldclient/config.py +++ b/ldclient/config.py @@ -147,7 +147,7 @@ def __init__(self, self.__inline_users_in_events = inline_users_in_events self.__http_proxy = http_proxy self.__diagnostic_opt_out = diagnostic_opt_out - self.__diagnostic_recording_interval = diagnostic_recording_interval + self.__diagnostic_recording_interval = max(diagnostic_recording_interval, 60) self.__wrapper_name = wrapper_name self.__wrapper_version = wrapper_version diff --git a/ldclient/diagnostics.py b/ldclient/diagnostics.py index d6109afb..0b6c5cfc 100644 --- a/ldclient/diagnostics.py +++ b/ldclient/diagnostics.py @@ -1,7 +1,7 @@ -DEFAULT_CONFIG = Config('sdk_key') -DEFAULT_BASE_URI = DEFAULT_CONFIG.base_uri -DEFAULT_EVENTS_URI = DEFAULT_CONFIG.events_uri -DEFAULT_STREAM_BASE_URI = DEFAULT_CONFIG.stream_base_uri +#DEFAULT_CONFIG = Config.default() +#DEFAULT_BASE_URI = DEFAULT_CONFIG.base_uri +#DEFAULT_EVENTS_URI = DEFAULT_CONFIG.events_uri +#DEFAULT_STREAM_BASE_URI = DEFAULT_CONFIG.stream_base_uri def diagnostic_base_fields(kind, creation_date, diagnostic_id): return {'kind': kind, @@ -17,22 +17,20 @@ def create_diagnostic_statistics(creation_date, diagnostic_id, data_since_date, return base_object def create_diagnostic_config_object(config): - return {'customBaseURI': config.base_uri != DEFAULT_BASE_URI, - 'customEventsURI': config.events_uri != DEFAULT_EVENTS_URI, - 'customStreamURI': config.stream_base_uri != DEFAULT_STREAM_BASE_URI, + default_config = Config.default() + return {'customBaseURI': config.base_uri != default_config.base_uri, + 'customEventsURI': config.events_uri != default_config.events_uri, + 'customStreamURI': config.stream_base_uri != default_config.stream_base_uri, 'eventsCapacity': config.events_max_pending, 'connectTimeoutMillis': config.connect_timeout * 1000, 'socketTimeoutMillis': config.read_timeout * 1000, 'eventsFlushIntervalMillis': config.flush_interval * 1000, - 'usingProxy': False, #TODO - 'usingProxyAuthenticator': False, #TODO + 'usingProxy': config.http_proxy is not None, 'streamingDisabled': not config.stream, - 'usingRelayDaemon': False, #TODO + 'usingRelayDaemon': config.use_ldd, 'offline': config.offline, #Check if this actually makes sense 'allAttributesPrivate': config.all_attributes_private, 'pollingIntervalMillis': config.poll_interval * 1000, - #'startWaitMillis': check, - #'samplingInterval': check, #'reconnectTimeMillis': check, 'userKeysCapacity': config.user_keys_capacity, 'userKeysFlushIntervalMillis': config.user_keys_flush_interval * 1000, @@ -41,11 +39,14 @@ def create_diagnostic_config_object(config): #'featureStoreFactory': check, } -def create_diagnostic_sdk_object(): - return {} +def create_diagnostic_sdk_object(config): + return {'name': 'python-server-sdk', + 'version': VERSION, + 'wrapperName': config.wrapper_name, + 'wrapperVersion': config.wrapper_version} def create_diagnostic_platform_object(): - return {} + return {'name': 'python'} def create_diagnostic_init(creation_date, diagnostic_id, config): base_object = diagnostic_base_fields('diagnostic-init', creation_date, diagnostic_id) diff --git a/ldclient/event_processor.py b/ldclient/event_processor.py index c66d6aac..2b5b952e 100644 --- a/ldclient/event_processor.py +++ b/ldclient/event_processor.py @@ -30,7 +30,7 @@ from 
ldclient.util import create_http_pool_manager from ldclient.util import log from ldclient.util import http_error_message, is_http_error_recoverable, stringify_attrs, throw_if_unsuccessful_response - +from ldclient.diagnostics import create_diagnostic_init, create_diagnostic_statistics __MAX_FLUSH_THREADS__ = 5 __CURRENT_EVENT_SCHEMA__ = 3 @@ -341,6 +341,7 @@ def _handle_response(self, r): def _send_and_reset_diagnostics(self): dropped_event_count = self._outbox.get_and_clear_dropped_count() + stats_event = create_diagnostic_statistics(1, 0, 0, dropped_event_count, self._deduplicated_users, 0) return def _do_shutdown(self): From 7fd454fd6ee0ab32e3b1bb52ea57fc91e9514397 Mon Sep 17 00:00:00 2001 From: Ben Woskow Date: Mon, 23 Dec 2019 16:01:04 -0800 Subject: [PATCH 166/190] don't let user fall outside of last bucket in rollout --- ldclient/flag.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/ldclient/flag.py b/ldclient/flag.py index 11a5be41..3ff80fab 100644 --- a/ldclient/flag.py +++ b/ldclient/flag.py @@ -209,6 +209,13 @@ def _variation_index_for_user(feature, rule, user): if bucket < sum: return wv.get('variation') + # The user's bucket value was greater than or equal to the end of the last bucket. This could happen due + # to a rounding error, or due to the fact that we are scaling to 100000 rather than 99999, or the flag + # data could contain buckets that don't actually add up to 100000. Rather than returning an error in + # this case (or changing the scaling, which would potentially change the results for *all* users), we + # will simply put the user in the last bucket. + return rule['rollout'].get('variations')[len(rule['rollout'].get('variations'))].get('variation') + return None From 588f352b0ade70519ef2085f8362676d2106cf46 Mon Sep 17 00:00:00 2001 From: Ben Woskow Date: Mon, 23 Dec 2019 16:13:38 -0800 Subject: [PATCH 167/190] fixing conditional logic --- ldclient/flag.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ldclient/flag.py b/ldclient/flag.py index 3ff80fab..332d165e 100644 --- a/ldclient/flag.py +++ b/ldclient/flag.py @@ -198,13 +198,13 @@ def _variation_index_for_user(feature, rule, user): if rule.get('variation') is not None: return rule['variation'] - if rule.get('rollout') is not None: + if rule.get('rollout') is not None and rule['rollout'].get('variations') is not None and len(rule['rollout'].get('variations')) > 0: bucket_by = 'key' if rule['rollout'].get('bucketBy') is not None: bucket_by = rule['rollout']['bucketBy'] bucket = _bucket_user(user, feature['key'], feature['salt'], bucket_by) sum = 0.0 - for wv in rule['rollout'].get('variations') or []: + for wv in rule['rollout'].get('variations'): sum += wv.get('weight', 0.0) / 100000.0 if bucket < sum: return wv.get('variation') From 7b357b0c7e3f751058811cd2ee1968eb699cdd86 Mon Sep 17 00:00:00 2001 From: Gavin Whelan Date: Sat, 28 Dec 2019 14:57:58 +0000 Subject: [PATCH 168/190] Add docstrings for diagnostic configuration options. --- ldclient/config.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/ldclient/config.py b/ldclient/config.py index c040e9c4..65a39797 100644 --- a/ldclient/config.py +++ b/ldclient/config.py @@ -105,8 +105,13 @@ def __init__(self, variable, this is used regardless of whether the target URI is HTTP or HTTPS (the actual LaunchDarkly service uses HTTPS, but a Relay Proxy instance could use HTTP). 
Setting this Config parameter will override any proxy specified by an environment variable, but only for LaunchDarkly SDK connections. - :param bool diagnostic_opt_out: TODO TODO TODO - :param int diagnostic_recording_interval: TODO TODO TODO + :param bool diagnostic_opt_out: Unless this field is set to True, the client will send + some diagnostics data to the LaunchDarkly servers in order to assist in the development of future SDK + improvements. These diagnostics consist of an initial payload containing some details of the SDK in use, + the SDK's configuration, and the platform the SDK is being run on, as well as payloads sent + periodically with information on irregular occurrences such as dropped events. + :param int diagnostic_recording_interval: The interval in seconds at which periodic diagnostic data is + sent. The default is 900 seconds (every 15 minutes) and the minimum value is 60 seconds. :param string wrapper_name: For use by wrapper libraries to set an identifying name for the wrapper being used. This will be sent in HTTP headers during requests to the LaunchDarkly servers to allow recording metrics on the usage of these wrapper libraries. From af5a1621cda4f45cd49a436a2f413783afbf67b4 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 30 Dec 2019 12:04:04 -0800 Subject: [PATCH 169/190] fix off-by-1 error --- ldclient/flag.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ldclient/flag.py b/ldclient/flag.py index 332d165e..f5e9a237 100644 --- a/ldclient/flag.py +++ b/ldclient/flag.py @@ -214,7 +214,7 @@ def _variation_index_for_user(feature, rule, user): # data could contain buckets that don't actually add up to 100000. Rather than returning an error in # this case (or changing the scaling, which would potentially change the results for *all* users), we # will simply put the user in the last bucket. - return rule['rollout'].get('variations')[len(rule['rollout'].get('variations'))].get('variation') + return rule['rollout'].get('variations')[-1].get('variation') return None From 75a9aabc53c958cb7ce257f9bb755365365581a9 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 30 Dec 2019 12:04:37 -0800 Subject: [PATCH 170/190] avoid redundant dict lookups --- ldclient/flag.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/ldclient/flag.py b/ldclient/flag.py index f5e9a237..422a56f0 100644 --- a/ldclient/flag.py +++ b/ldclient/flag.py @@ -198,13 +198,17 @@ def _variation_index_for_user(feature, rule, user): if rule.get('variation') is not None: return rule['variation'] - if rule.get('rollout') is not None and rule['rollout'].get('variations') is not None and len(rule['rollout'].get('variations')) > 0: + rollout = rule.get('rollout') + if rollout is None: + return None + variations = rollout.get('variations') + if variations is not None and len(variations) > 0: bucket_by = 'key' - if rule['rollout'].get('bucketBy') is not None: - bucket_by = rule['rollout']['bucketBy'] + if rollout.get('bucketBy') is not None: + bucket_by = rollout['bucketBy'] bucket = _bucket_user(user, feature['key'], feature['salt'], bucket_by) sum = 0.0 - for wv in rule['rollout'].get('variations'): + for wv in variations: sum += wv.get('weight', 0.0) / 100000.0 if bucket < sum: return wv.get('variation') @@ -214,7 +218,7 @@ def _variation_index_for_user(feature, rule, user): # data could contain buckets that don't actually add up to 100000.
Rather than returning an error in # this case (or changing the scaling, which would potentially change the results for *all* users), we # will simply put the user in the last bucket. - return rule['rollout'].get('variations')[-1].get('variation') + return variations[-1].get('variation') return None From 590ca64ae68c772b71b905cff14f5a046bbc6f09 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 30 Dec 2019 12:04:56 -0800 Subject: [PATCH 171/190] add unit tests for basic bucketing logic and edge case --- testing/test_flag.py | 43 ++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 42 insertions(+), 1 deletion(-) diff --git a/testing/test_flag.py b/testing/test_flag.py index ced400e5..6b50b55a 100644 --- a/testing/test_flag.py +++ b/testing/test_flag.py @@ -1,6 +1,7 @@ +import math import pytest from ldclient.feature_store import InMemoryFeatureStore -from ldclient.flag import EvaluationDetail, EvalResult, _bucket_user, evaluate +from ldclient.flag import EvaluationDetail, EvalResult, _bucket_user, _variation_index_for_user, evaluate from ldclient.impl.event_factory import _EventFactory from ldclient.versioned_data_kind import FEATURES, SEGMENTS @@ -384,7 +385,47 @@ def _make_bool_flag_from_clause(clause): 'variations': [ False, True ] } +def test_variation_index_is_returned_for_bucket(): + user = { 'key': 'userkey' } + flag = { 'key': 'flagkey', 'salt': 'salt' } + + # First verify that with our test inputs, the bucket value will be greater than zero and less than 100000, + # so we can construct a rollout whose second bucket just barely contains that value + bucket_value = math.trunc(_bucket_user(user, flag['key'], flag['salt'], 'key') * 100000) + assert bucket_value > 0 and bucket_value < 100000 + + bad_variation_a = 0 + matched_variation = 1 + bad_variation_b = 2 + rule = { + 'rollout': { + 'variations': [ + { 'variation': bad_variation_a, 'weight': bucket_value }, # end of bucket range is not inclusive, so it will *not* match the target value + { 'variation': matched_variation, 'weight': 1 }, # size of this bucket is 1, so it only matches that specific value + { 'variation': bad_variation_b, 'weight': 100000 - (bucket_value + 1) } + ] + } + } + result_variation = _variation_index_for_user(flag, rule, user) + assert result_variation == matched_variation +def test_last_bucket_is_used_if_bucket_value_equals_total_weight(): + user = { 'key': 'userkey' } + flag = { 'key': 'flagkey', 'salt': 'salt' } + + # We'll construct a list of variations that stops right at the target bucket value + bucket_value = math.trunc(_bucket_user(user, flag['key'], flag['salt'], 'key') * 100000) + + rule = { + 'rollout': { + 'variations': [ + { 'variation': 0, 'weight': bucket_value } + ] + } + } + result_variation = _variation_index_for_user(flag, rule, user) + assert result_variation == 0 + def test_bucket_by_user_key(): user = { u'key': u'userKeyA' } bucket = _bucket_user(user, 'hashKey', 'saltyA', 'key') From 0f09a732077a182ddbcdc2da212cc1a2ac348d59 Mon Sep 17 00:00:00 2001 From: Gavin Whelan Date: Mon, 30 Dec 2019 21:24:28 +0000 Subject: [PATCH 172/190] Stream init tracking. Feeding of accumulator object through SDK. Various fixes. 
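The accumulator introduced here is the pivot of this patch: the event processor owns it, the streaming processor records connection attempts into it, and each periodic diagnostic event snapshots and resets it. Its intended lifecycle looks roughly like the following sketch (illustrative only; these are internal classes, and the timestamps and counts are made up):

    import time
    from ldclient.config import Config
    from ldclient.diagnostics import _DiagnosticAccumulator, create_diagnostic_id

    acc = _DiagnosticAccumulator(create_diagnostic_id(Config(sdk_key='SDK_KEY')))

    # The streaming processor reports each connection attempt:
    now = int(time.time() * 1000)
    acc.record_stream_init(now, 250, False)        # 250 ms to a healthy stream

    # The event dispatcher reports the size of each flushed payload:
    acc.record_events_in_batch(42)

    # Every diagnostic_recording_interval, the processor drains the stats:
    event = acc.create_event_and_reset(dropped_events=3, deduplicated_users=7)
    assert event['kind'] == 'diagnostic'
    assert event['streamInits'][0]['durationMillis'] == 250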
--- ldclient/client.py | 11 +++-- ldclient/config.py | 5 +++ ldclient/diagnostics.py | 77 +++++++++++++++++++++++---------- ldclient/event_processor.py | 64 ++++++++++++++++++++++++--- ldclient/streaming.py | 12 ++++- testing/test_event_processor.py | 69 ++++++++++++++--------------- testing/test_streaming.py | 12 ++--- 7 files changed, 178 insertions(+), 72 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index 825d542c..c51b2b53 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -105,8 +105,13 @@ def __init__(self, sdk_key=None, config=None, start_wait=5): self._event_processor = self._make_event_processor(self._config) + if callable(getattr(self._event_processor, 'retrieve_diagnostic_accumulator', None)): + diagnostic_accumulator = self._event_processor.retrieve_diagnostic_accumulator() + else: + diagnostic_accumulator = None + update_processor_ready = threading.Event() - self._update_processor = self._make_update_processor(self._config, self._store, update_processor_ready) + self._update_processor = self._make_update_processor(self._config, self._store, update_processor_ready, diagnostic_accumulator) self._update_processor.start() if start_wait > 0 and not self._config.offline and not self._config.use_ldd: @@ -124,7 +129,7 @@ def _make_event_processor(self, config): return NullEventProcessor() return config.event_processor_class(config) - def _make_update_processor(self, config, store, ready): + def _make_update_processor(self, config, store, ready, diagnostic_accumulator): if config.update_processor_class: log.info("Using user-specified update processor: " + str(config.update_processor_class)) return config.update_processor_class(config, store, ready) @@ -139,7 +144,7 @@ def _make_update_processor(self, config, store, ready): """ :type: FeatureRequester """ if config.stream: - return StreamingUpdateProcessor(config, feature_requester, store, ready) + return StreamingUpdateProcessor(config, feature_requester, store, ready, diagnostic_accumulator) log.info("Disabling streaming API") log.warning("You should only disable the streaming API if instructed to do so by LaunchDarkly support") diff --git a/ldclient/config.py b/ldclient/config.py index 65a39797..8b1ee411 100644 --- a/ldclient/config.py +++ b/ldclient/config.py @@ -216,6 +216,11 @@ def base_uri(self): def get_latest_flags_uri(self): return self.__base_uri + GET_LATEST_FEATURES_PATH + # for internal use only + @property + def events_base_uri(self): + return self.__events_uri + # for internal use only - should construct the URL path in the events code, not here @property def events_uri(self): diff --git a/ldclient/diagnostics.py b/ldclient/diagnostics.py index 0b6c5cfc..3acb96fd 100644 --- a/ldclient/diagnostics.py +++ b/ldclient/diagnostics.py @@ -2,22 +2,63 @@ #DEFAULT_BASE_URI = DEFAULT_CONFIG.base_uri #DEFAULT_EVENTS_URI = DEFAULT_CONFIG.events_uri #DEFAULT_STREAM_BASE_URI = DEFAULT_CONFIG.stream_base_uri +import threading +import time +import uuid -def diagnostic_base_fields(kind, creation_date, diagnostic_id): +class _DiagnosticAccumulator(object): + def __init__(self, diagnostic_id): + self.diagnostic_id = diagnostic_id + self.data_since_date = int(time.time() * 1000) + self._state_lock = threading.Lock() + self._events_in_last_batch = 0 + self._stream_inits = [] + + def record_stream_init(self, timestamp, duration, failed): + with self._state_lock: + self._stream_inits.append({'timestamp': timestamp, + 'durationMillis': duration, + 'failed': failed}) + + def record_events_in_batch(self, 
events_in_batch): + with self._state_lock: + self._events_in_last_batch = events_in_batch + + def create_event_and_reset(self, dropped_events, deduplicated_users): + with self._state_lock: + events_in_batch = self._events_in_last_batch + stream_inits = self._stream_inits + self._events_in_last_batch = 0 + self._stream_inits = [] + + current_time = int(time.time() * 1000) + periodic_event = _diagnostic_base_fields('diagnostic', current_time, self.diagnostic_id) + periodic_event.update({'dataSincedate': self.data_since_date, + 'droppedEvents': dropped_events, + 'deduplicatedUsers': deduplicated_users, + 'eventsInLastBatch': events_in_batch, + 'streamInits': stream_inits}) + self.data_since_date = current_time + return periodic_event + +def create_diagnostic_id(config): + return {'diagnosticId': str(uuid.uuid4()), + 'sdkKeySuffix': '' if not config.sdk_key else config.sdk_key[-6:]} + +def create_diagnostic_init(creation_date, diagnostic_id, config): + base_object = _diagnostic_base_fields('diagnostic-init', creation_date, diagnostic_id) + base_object.update({'configuration': _create_diagnostic_config_object(config), + 'sdk': _create_diagnostic_sdk_object(config), + 'platform': _create_diagnostic_platform_object()}) + return base_object + +def _diagnostic_base_fields(kind, creation_date, diagnostic_id): return {'kind': kind, 'creationDate': creation_date, 'id': diagnostic_id} -def create_diagnostic_statistics(creation_date, diagnostic_id, data_since_date, dropped_events, deduplicated_users, events_in_last_batch): - base_object = diagnostic_base_fields('diagnostic', creation_date, diagnostic_id) - base_object.update({'dataSinceDate': data_since_date, - 'droppedEvents': dropped_events, - 'deduplicatedUsers': deduplicated_users, - 'eventsInLastBatch': events_in_last_batch}) - return base_object - -def create_diagnostic_config_object(config): - default_config = Config.default() +def _create_diagnostic_config_object(config): + default_config = config.default() return {'customBaseURI': config.base_uri != default_config.base_uri, 'customEventsURI': config.events_uri != default_config.events_uri, 'customStreamURI': config.stream_base_uri != default_config.stream_base_uri, @@ -28,7 +69,6 @@ def create_diagnostic_config_object(config): 'usingProxy': config.http_proxy is not None, 'streamingDisabled': not config.stream, 'usingRelayDaemon': config.use_ldd, - 'offline': config.offline, #Check if this actually makes sense 'allAttributesPrivate': config.all_attributes_private, 'pollingIntervalMillis': config.poll_interval * 1000, #'reconnectTimeMillis': check, @@ -39,18 +79,11 @@ def create_diagnostic_config_object(config): #'featureStoreFactory': check, } -def create_diagnostic_sdk_object(config): +def _create_diagnostic_sdk_object(config): return {'name': 'python-server-sdk', - 'version': VERSION, + 'version': 6, #VERSION, 'wrapperName': config.wrapper_name, 'wrapperVersion': config.wrapper_version} -def create_diagnostic_platform_object(): +def _create_diagnostic_platform_object(): return {'name': 'python'} - -def create_diagnostic_init(creation_date, diagnostic_id, config): - base_object = diagnostic_base_fields('diagnostic-init', creation_date, diagnostic_id) - base_object.update({'configuration': create_diagnostic_config_object(config), - 'sdk': create_diagnostic_sdk_object(), - 'platform': create_diagnostic_platform_object()}) - return base_object diff --git a/ldclient/event_processor.py b/ldclient/event_processor.py index 2b5b952e..c7ce1b27 100644 --- a/ldclient/event_processor.py +++ 
b/ldclient/event_processor.py @@ -30,7 +30,7 @@ from ldclient.util import create_http_pool_manager from ldclient.util import log from ldclient.util import http_error_message, is_http_error_recoverable, stringify_attrs, throw_if_unsuccessful_response -from ldclient.diagnostics import create_diagnostic_init, create_diagnostic_statistics +from ldclient.diagnostics import create_diagnostic_init, create_diagnostic_id, _DiagnosticAccumulator __MAX_FLUSH_THREADS__ = 5 __CURRENT_EVENT_SCHEMA__ = 3 @@ -177,6 +177,40 @@ def _do_send(self, output_events): 'Unhandled exception in event processor. Analytics events were not processed. [%s]', e) +class DiagnosticEventSendTask(object): + def __init__(self, http, config, event_body, response_fn): + self._http = http + self._config = config + self._event_body = event_body + self._response_fn = response_fn + + def run_thread(self): + try: + Thread(target = self._do_send()).start() + except Exception: + log.warning( + 'Unhandled exception in event processor. Analytics events were not processed.', + exc_info=True) + + def _do_send(self): + # noinspection PyBroadException + try: + json_body = json.dumps(self._event_body) + log.debug('Sending diagnostic event: ' + json_body) + hdrs = _headers(self._config) + uri = self._config.events_base_uri + '/diagnostic' + r = self._http.request('POST', uri, + headers=hdrs, + timeout=urllib3.Timeout(connect=self._config.connect_timeout, read=self._config.read_timeout), + body=json_body, + retries=1) + if (self._response_fn): + self._response_fn(r) + except Exception as e: + log.warning( + 'Unhandled exception in event processor. Diagnostic event was not sent. [%s]', e) + + FlushPayload = namedtuple('FlushPayload', ['events', 'summary']) @@ -215,7 +249,7 @@ def clear(self): class EventDispatcher(object): - def __init__(self, inbox, config, http_client): + def __init__(self, inbox, config, http_client, diagnostic_accumulator=None): self._inbox = inbox self._config = config self._http = create_http_pool_manager(num_pools=1, verify_ssl=config.verify_ssl, @@ -227,6 +261,7 @@ def __init__(self, inbox, config, http_client): self._formatter = EventOutputFormatter(config) self._last_known_past_time = 0 self._deduplicated_users = 0 + self._diagnostic_accumulator = diagnostic_accumulator self._flush_workers = FixedThreadPool(__MAX_FLUSH_THREADS__, "ldclient.flush") @@ -340,9 +375,11 @@ def _handle_response(self, r): return def _send_and_reset_diagnostics(self): - dropped_event_count = self._outbox.get_and_clear_dropped_count() - stats_event = create_diagnostic_statistics(1, 0, 0, dropped_event_count, self._deduplicated_users, 0) - return + if (self._diagnostic_accumulator): + dropped_event_count = self._outbox.get_and_clear_dropped_count() + stats_event = self._diagnostic_accumulator.create_event_and_reset(dropped_event_count, self._deduplicated_users) + self._deduplicated_users = 0 + DiagnosticEventSendTask(self._http, self._config, stats_event, None).run_thread() def _do_shutdown(self): self._flush_workers.stop() @@ -359,12 +396,24 @@ def __init__(self, config, http=None, dispatcher_class=None): self._users_flush_timer = RepeatingTimer(config.user_keys_flush_interval, self._flush_users) self._flush_timer.start() self._users_flush_timer.start() + self._http = create_http_pool_manager(num_pools=1, verify_ssl=config.verify_ssl, + target_base_uri=config.events_uri, + force_proxy=config.http_proxy) if http is None else http if not config.diagnostic_opt_out: + diagnostic_id = create_diagnostic_id(config) + self._diagnostic_accumulator = 
_DiagnosticAccumulator(diagnostic_id) + init_event = create_diagnostic_init(self._diagnostic_accumulator.data_since_date, diagnostic_id, config) + DiagnosticEventSendTask(self._http, config, init_event, None).run_thread() + self._diagnostic_event_timer = RepeatingTimer(config.diagnostic_recording_interval, self._send_diagnostic) self._diagnostic_event_timer.start() + else: + self._diagnostic_accumulator = None + self._close_lock = Lock() self._closed = False - (dispatcher_class or EventDispatcher)(self._inbox, config, http) + + (dispatcher_class or EventDispatcher)(self._inbox, config, self._http, self._diagnostic_accumulator) def send_event(self, event): event['creationDate'] = int(time.time() * 1000) @@ -385,6 +434,9 @@ def stop(self): # is full; an orderly shutdown can't happen unless these messages are received. self._post_message_and_wait('stop') + def retrieve_diagnostic_accumulator(self): + return self._diagnostic_accumulator + def _post_to_inbox(self, message): try: self._inbox.put(message, block=False) diff --git a/ldclient/streaming.py b/ldclient/streaming.py index 391e2f52..2016e2d3 100644 --- a/ldclient/streaming.py +++ b/ldclient/streaming.py @@ -27,7 +27,7 @@ class StreamingUpdateProcessor(Thread, UpdateProcessor): - def __init__(self, config, requester, store, ready): + def __init__(self, config, requester, store, ready, diagnostic_accumulator): Thread.__init__(self) self.daemon = True self._uri = config.stream_base_uri + STREAM_ALL_PATH @@ -36,6 +36,8 @@ def __init__(self, config, requester, store, ready): self._store = store self._running = False self._ready = ready + self._diagnostic_accumulator = diagnostic_accumulator + self._es_started = None # We need to suppress the default logging behavior of the backoff package, because # it logs messages at ERROR level with variable content (the delay time) which will @@ -52,11 +54,14 @@ def run(self): self._running = True while self._running: try: + self._es_started = int(time.time() * 1000) messages = self._connect() for msg in messages: if not self._running: break message_ok = self.process_message(self._store, self._requester, msg) + self._record_stream_init(False) + self._es_started = None if message_ok is True and self._ready.is_set() is False: log.info("StreamingUpdateProcessor initialized ok.") self._ready.set() @@ -71,6 +76,11 @@ def run(self): # no stacktrace here because, for a typical connection error, it'll just be a lengthy tour of urllib3 internals time.sleep(1) + def _record_stream_init(self, failed): + if self._diagnostic_accumulator and self._es_started: + current_time = int(time.time() * 1000) + self._diagnostic_accumulator.record_stream_init(current_time, current_time - self._es_started, failed) + def _backoff_expo(): return backoff.expo(max_value=30) diff --git a/testing/test_event_processor.py b/testing/test_event_processor.py index 61033bec..d6641471 100644 --- a/testing/test_event_processor.py +++ b/testing/test_event_processor.py @@ -62,13 +62,14 @@ def teardown_function(): if ep is not None: ep.stop() -def setup_processor(config): - global ep - ep = DefaultEventProcessor(config, mock_http) - +class DefaultTestProcessor(DefaultEventProcessor): + def __init__(self, **kwargs): + if not 'diagnostic_opt_out' in kwargs: + kwargs['diagnostic_opt_out'] = True + DefaultEventProcessor.__init__(self, Config(**kwargs), mock_http) def test_identify_event_is_queued(): - with DefaultEventProcessor(Config(), mock_http) as ep: + with DefaultTestProcessor() as ep: e = { 'kind': 'identify', 'user': user } ep.send_event(e) @@ 
-82,7 +83,7 @@ def test_identify_event_is_queued(): }] def test_user_is_filtered_in_identify_event(): - with DefaultEventProcessor(Config(all_attributes_private = True), mock_http) as ep: + with DefaultTestProcessor(all_attributes_private = True) as ep: e = { 'kind': 'identify', 'user': user } ep.send_event(e) @@ -96,7 +97,7 @@ def test_user_is_filtered_in_identify_event(): }] def test_user_attrs_are_stringified_in_identify_event(): - with DefaultEventProcessor(Config(), mock_http) as ep: + with DefaultTestProcessor() as ep: e = { 'kind': 'identify', 'user': numeric_user } ep.send_event(e) @@ -110,7 +111,7 @@ def test_user_attrs_are_stringified_in_identify_event(): }] def test_individual_feature_event_is_queued_with_index_event(): - with DefaultEventProcessor(Config(), mock_http) as ep: + with DefaultTestProcessor() as ep: e = { 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, 'variation': 1, 'value': 'value', 'default': 'default', 'trackEvents': True @@ -124,7 +125,7 @@ def test_individual_feature_event_is_queued_with_index_event(): check_summary_event(output[2]) def test_user_is_filtered_in_index_event(): - with DefaultEventProcessor(Config(all_attributes_private = True), mock_http) as ep: + with DefaultTestProcessor(all_attributes_private = True) as ep: e = { 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, 'variation': 1, 'value': 'value', 'default': 'default', 'trackEvents': True @@ -138,7 +139,7 @@ def test_user_is_filtered_in_index_event(): check_summary_event(output[2]) def test_user_attrs_are_stringified_in_index_event(): - with DefaultEventProcessor(Config(), mock_http) as ep: + with DefaultTestProcessor() as ep: e = { 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': numeric_user, 'variation': 1, 'value': 'value', 'default': 'default', 'trackEvents': True @@ -152,7 +153,7 @@ def test_user_attrs_are_stringified_in_index_event(): check_summary_event(output[2]) def test_feature_event_can_contain_inline_user(): - with DefaultEventProcessor(Config(inline_users_in_events = True), mock_http) as ep: + with DefaultTestProcessor(inline_users_in_events = True) as ep: e = { 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, 'variation': 1, 'value': 'value', 'default': 'default', 'trackEvents': True @@ -165,7 +166,7 @@ def test_feature_event_can_contain_inline_user(): check_summary_event(output[1]) def test_user_is_filtered_in_feature_event(): - with DefaultEventProcessor(Config(inline_users_in_events = True, all_attributes_private = True), mock_http) as ep: + with DefaultTestProcessor(inline_users_in_events = True, all_attributes_private = True) as ep: e = { 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, 'variation': 1, 'value': 'value', 'default': 'default', 'trackEvents': True @@ -178,7 +179,7 @@ def test_user_is_filtered_in_feature_event(): check_summary_event(output[1]) def test_user_attrs_are_stringified_in_feature_event(): - with DefaultEventProcessor(Config(inline_users_in_events = True), mock_http) as ep: + with DefaultTestProcessor(inline_users_in_events = True) as ep: e = { 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': numeric_user, 'variation': 1, 'value': 'value', 'default': 'default', 'trackEvents': True @@ -191,7 +192,7 @@ def test_user_attrs_are_stringified_in_feature_event(): check_summary_event(output[1]) def test_index_event_is_still_generated_if_inline_users_is_true_but_feature_event_is_not_tracked(): - with DefaultEventProcessor(Config(inline_users_in_events = True), mock_http) as 
ep: + with DefaultTestProcessor(inline_users_in_events = True) as ep: e = { 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, 'variation': 1, 'value': 'value', 'default': 'default', 'trackEvents': False @@ -204,7 +205,7 @@ def test_index_event_is_still_generated_if_inline_users_is_true_but_feature_even check_summary_event(output[1]) def test_two_events_for_same_user_only_produce_one_index_event(): - with DefaultEventProcessor(Config(user_keys_flush_interval = 300), mock_http) as ep: + with DefaultTestProcessor(user_keys_flush_interval = 300) as ep: e0 = { 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, 'variation': 1, 'value': 'value', 'default': 'default', 'trackEvents': True @@ -221,7 +222,7 @@ def test_two_events_for_same_user_only_produce_one_index_event(): check_summary_event(output[3]) def test_new_index_event_is_added_if_user_cache_has_been_cleared(): - with DefaultEventProcessor(Config(user_keys_flush_interval = 0.1), mock_http) as ep: + with DefaultTestProcessor(user_keys_flush_interval = 0.1) as ep: e0 = { 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, 'variation': 1, 'value': 'value', 'default': 'default', 'trackEvents': True @@ -240,7 +241,7 @@ def test_new_index_event_is_added_if_user_cache_has_been_cleared(): check_summary_event(output[4]) def test_event_kind_is_debug_if_flag_is_temporarily_in_debug_mode(): - with DefaultEventProcessor(Config(), mock_http) as ep: + with DefaultTestProcessor() as ep: future_time = now() + 100000 e = { 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, @@ -256,7 +257,7 @@ def test_event_kind_is_debug_if_flag_is_temporarily_in_debug_mode(): check_summary_event(output[2]) def test_event_can_be_both_tracked_and_debugged(): - with DefaultEventProcessor(Config(), mock_http) as ep: + with DefaultTestProcessor() as ep: future_time = now() + 100000 e = { 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, @@ -273,7 +274,7 @@ def test_event_can_be_both_tracked_and_debugged(): check_summary_event(output[3]) def test_debug_mode_expires_based_on_client_time_if_client_time_is_later_than_server_time(): - with DefaultEventProcessor(Config(), mock_http) as ep: + with DefaultTestProcessor() as ep: # Pick a server time that is somewhat behind the client time server_time = now() - 20000 @@ -299,7 +300,7 @@ def test_debug_mode_expires_based_on_client_time_if_client_time_is_later_than_se check_summary_event(output[1]) def test_debug_mode_expires_based_on_server_time_if_server_time_is_later_than_client_time(): - with DefaultEventProcessor(Config(), mock_http) as ep: + with DefaultTestProcessor() as ep: # Pick a server time that is somewhat ahead of the client time server_time = now() + 20000 @@ -325,7 +326,7 @@ def test_debug_mode_expires_based_on_server_time_if_server_time_is_later_than_cl check_summary_event(output[1]) def test_two_feature_events_for_same_user_generate_only_one_index_event(): - with DefaultEventProcessor(Config(), mock_http) as ep: + with DefaultTestProcessor() as ep: e1 = { 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, 'variation': 1, 'value': 'value1', 'default': 'default', 'trackEvents': False @@ -343,7 +344,7 @@ def test_two_feature_events_for_same_user_generate_only_one_index_event(): check_summary_event(output[1]) def test_nontracked_events_are_summarized(): - with DefaultEventProcessor(Config(), mock_http) as ep: + with DefaultTestProcessor() as ep: e1 = { 'kind': 'feature', 'key': 'flagkey1', 'version': 11, 'user': user, 'variation': 1, 
'value': 'value1', 'default': 'default1', 'trackEvents': False @@ -374,7 +375,7 @@ def test_nontracked_events_are_summarized(): } def test_custom_event_is_queued_with_user(): - with DefaultEventProcessor(Config(), mock_http) as ep: + with DefaultTestProcessor() as ep: e = { 'kind': 'custom', 'key': 'eventkey', 'user': user, 'data': { 'thing': 'stuff '}, 'metricValue': 1.5 } ep.send_event(e) @@ -384,7 +385,7 @@ def test_custom_event_is_queued_with_user(): check_custom_event(output[1], e, None) def test_custom_event_can_contain_inline_user(): - with DefaultEventProcessor(Config(inline_users_in_events = True), mock_http) as ep: + with DefaultTestProcessor(inline_users_in_events = True) as ep: e = { 'kind': 'custom', 'key': 'eventkey', 'user': user, 'data': { 'thing': 'stuff '} } ep.send_event(e) @@ -393,7 +394,7 @@ def test_custom_event_can_contain_inline_user(): check_custom_event(output[0], e, user) def test_user_is_filtered_in_custom_event(): - with DefaultEventProcessor(Config(inline_users_in_events = True, all_attributes_private = True), mock_http) as ep: + with DefaultTestProcessor(inline_users_in_events = True, all_attributes_private = True) as ep: e = { 'kind': 'custom', 'key': 'eventkey', 'user': user, 'data': { 'thing': 'stuff '} } ep.send_event(e) @@ -402,7 +403,7 @@ def test_user_is_filtered_in_custom_event(): check_custom_event(output[0], e, filtered_user) def test_user_attrs_are_stringified_in_custom_event(): - with DefaultEventProcessor(Config(inline_users_in_events = True), mock_http) as ep: + with DefaultTestProcessor(inline_users_in_events = True) as ep: e = { 'kind': 'custom', 'key': 'eventkey', 'user': numeric_user, 'data': { 'thing': 'stuff '} } ep.send_event(e) @@ -411,13 +412,13 @@ def test_user_attrs_are_stringified_in_custom_event(): check_custom_event(output[0], e, stringified_numeric_user) def test_nothing_is_sent_if_there_are_no_events(): - with DefaultEventProcessor(Config(), mock_http) as ep: + with DefaultTestProcessor() as ep: ep.flush() ep._wait_until_inactive() assert mock_http.request_data is None def test_sdk_key_is_sent(): - with DefaultEventProcessor(Config(sdk_key = 'SDK_KEY'), mock_http) as ep: + with DefaultTestProcessor(sdk_key = 'SDK_KEY') as ep: ep.send_event({ 'kind': 'identify', 'user': user }) ep.flush() ep._wait_until_inactive() @@ -425,7 +426,7 @@ def test_sdk_key_is_sent(): assert mock_http.request_headers.get('Authorization') == 'SDK_KEY' def test_wrapper_header_not_sent_when_not_set(): - with DefaultEventProcessor(Config(), mock_http) as ep: + with DefaultTestProcessor() as ep: ep.send_event({ 'kind': 'identify', 'user': user }) ep.flush() ep._wait_until_inactive() @@ -433,7 +434,7 @@ def test_wrapper_header_not_sent_when_not_set(): assert mock_http.request_headers.get('X-LaunchDarkly-Wrapper') is None def test_wrapper_header_sent_when_set(): - with DefaultEventProcessor(Config(wrapper_name = "Flask", wrapper_version = "0.0.1"), mock_http) as ep: + with DefaultTestProcessor(wrapper_name = "Flask", wrapper_version = "0.0.1") as ep: ep.send_event({ 'kind': 'identify', 'user': user }) ep.flush() ep._wait_until_inactive() @@ -441,7 +442,7 @@ def test_wrapper_header_sent_when_set(): assert mock_http.request_headers.get('X-LaunchDarkly-Wrapper') == "Flask/0.0.1" def test_wrapper_header_sent_without_version(): - with DefaultEventProcessor(Config(wrapper_name = "Flask"), mock_http) as ep: + with DefaultTestProcessor(wrapper_name = "Flask") as ep: ep.send_event({ 'kind': 'identify', 'user': user }) ep.flush() ep._wait_until_inactive() @@ -468,7 
+469,7 @@ def test_does_not_block_on_full_inbox(): ep_inbox_holder = [ None ] ep_inbox = None - def dispatcher_factory(inbox, config, http): + def dispatcher_factory(inbox, config, http, diag): ep_inbox_holder[0] = inbox # it's an array because otherwise it's hard for a closure to modify a variable return None # the dispatcher object itself doesn't matter, we only manipulate the inbox def event_consumer(): @@ -540,7 +541,7 @@ def _verify_https_proxy_is_used(server, config): assert req.method == 'CONNECT' def verify_unrecoverable_http_error(status): - with DefaultEventProcessor(Config(sdk_key = 'SDK_KEY'), mock_http) as ep: + with DefaultTestProcessor(sdk_key = 'SDK_KEY') as ep: mock_http.set_response_status(status) ep.send_event({ 'kind': 'identify', 'user': user }) ep.flush() @@ -553,7 +554,7 @@ def verify_unrecoverable_http_error(status): assert mock_http.request_data is None def verify_recoverable_http_error(status): - with DefaultEventProcessor(Config(sdk_key = 'SDK_KEY'), mock_http) as ep: + with DefaultTestProcessor(sdk_key = 'SDK_KEY') as ep: mock_http.set_response_status(status) ep.send_event({ 'kind': 'identify', 'user': user }) ep.flush() diff --git a/testing/test_streaming.py b/testing/test_streaming.py index 37cf0148..0adf6738 100644 --- a/testing/test_streaming.py +++ b/testing/test_streaming.py @@ -22,7 +22,7 @@ def test_uses_stream_uri(): config = Config(sdk_key = 'sdk-key', stream_uri = server.uri) server.setup_response('/all', 200, fake_event, { 'Content-Type': 'text/event-stream' }) - with StreamingUpdateProcessor(config, None, store, ready) as sp: + with StreamingUpdateProcessor(config, None, store, ready, None) as sp: sp.start() req = server.await_request() assert req.method == 'GET' @@ -37,7 +37,7 @@ def test_sends_headers(): config = Config(sdk_key = 'sdk-key', stream_uri = server.uri) server.setup_response('/all', 200, fake_event, { 'Content-Type': 'text/event-stream' }) - with StreamingUpdateProcessor(config, None, store, ready) as sp: + with StreamingUpdateProcessor(config, None, store, ready, None) as sp: sp.start() req = server.await_request() assert req.headers.get('Authorization') == 'sdk-key' @@ -53,7 +53,7 @@ def test_sends_wrapper_header(): wrapper_name = 'Flask', wrapper_version = '0.1.0') server.setup_response('/all', 200, fake_event, { 'Content-Type': 'text/event-stream' }) - with StreamingUpdateProcessor(config, None, store, ready) as sp: + with StreamingUpdateProcessor(config, None, store, ready, None) as sp: sp.start() req = server.await_request() assert req.headers.get('X-LaunchDarkly-Wrapper') == 'Flask/0.1.0' @@ -67,7 +67,7 @@ def test_sends_wrapper_header_without_version(): wrapper_name = 'Flask') server.setup_response('/all', 200, fake_event, { 'Content-Type': 'text/event-stream' }) - with StreamingUpdateProcessor(config, None, store, ready) as sp: + with StreamingUpdateProcessor(config, None, store, ready, None) as sp: sp.start() req = server.await_request() assert req.headers.get('X-LaunchDarkly-Wrapper') == 'Flask' @@ -98,7 +98,7 @@ def _verify_http_proxy_is_used(server, config): store = InMemoryFeatureStore() ready = Event() server.setup_response(config.stream_base_uri + '/all', 200, fake_event, { 'Content-Type': 'text/event-stream' }) - with StreamingUpdateProcessor(config, None, store, ready) as sp: + with StreamingUpdateProcessor(config, None, store, ready, None) as sp: sp.start() # For an insecure proxy request, our stub server behaves enough like the real thing to satisfy the # HTTP client, so we should be able to see the request go 
through. Note that the URI path will @@ -112,7 +112,7 @@ def _verify_https_proxy_is_used(server, config): store = InMemoryFeatureStore() ready = Event() server.setup_response(config.stream_base_uri + '/all', 200, fake_event, { 'Content-Type': 'text/event-stream' }) - with StreamingUpdateProcessor(config, None, store, ready) as sp: + with StreamingUpdateProcessor(config, None, store, ready, None) as sp: sp.start() # Our simple stub server implementation can't really do HTTPS proxying, so the request will fail, but # it can still record that it *got* the request, which proves that the request went to the proxy. From e50ad29e2cb97307683c4ebc6676da19e8a69311 Mon Sep 17 00:00:00 2001 From: Gavin Whelan Date: Mon, 30 Dec 2019 21:57:21 +0000 Subject: [PATCH 173/190] Track events in last batch. --- ldclient/event_processor.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/ldclient/event_processor.py b/ldclient/event_processor.py index c7ce1b27..7d88b64c 100644 --- a/ldclient/event_processor.py +++ b/ldclient/event_processor.py @@ -351,6 +351,8 @@ def _trigger_flush(self): if self._disabled: return payload = self._outbox.get_payload() + if self._diagnostic_accumulator: + self._diagnostic_accumulator.record_events_in_batch(len(payload.events)) if len(payload.events) > 0 or len(payload.summary.counters) > 0: task = EventPayloadSendTask(self._http, self._config, self._formatter, payload, self._handle_response) @@ -375,7 +377,7 @@ def _handle_response(self, r): return def _send_and_reset_diagnostics(self): - if (self._diagnostic_accumulator): + if self._diagnostic_accumulator: dropped_event_count = self._outbox.get_and_clear_dropped_count() stats_event = self._diagnostic_accumulator.create_event_and_reset(dropped_event_count, self._deduplicated_users) self._deduplicated_users = 0 From f6ad20136112fa18d220da04e9afc7a82d8ad075 Mon Sep 17 00:00:00 2001 From: Gavin Whelan Date: Mon, 30 Dec 2019 22:07:33 +0000 Subject: [PATCH 174/190] Fix sdk version field, some stylistic improvements. --- ldclient/diagnostics.py | 4 +++- ldclient/event_processor.py | 8 ++++---- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/ldclient/diagnostics.py b/ldclient/diagnostics.py index 3acb96fd..62913e45 100644 --- a/ldclient/diagnostics.py +++ b/ldclient/diagnostics.py @@ -6,6 +6,8 @@ import time import uuid +from ldclient.version import VERSION + class _DiagnosticAccumulator(object): def __init__(self, diagnostic_id): self.diagnostic_id = diagnostic_id @@ -81,7 +83,7 @@ def _create_diagnostic_config_object(config): def _create_diagnostic_sdk_object(config): return {'name': 'python-server-sdk', - 'version': 6, #VERSION, + 'version': VERSION, 'wrapperName': config.wrapper_name, 'wrapperVersion': config.wrapper_version} diff --git a/ldclient/event_processor.py b/ldclient/event_processor.py index 7d88b64c..2045d5de 100644 --- a/ldclient/event_processor.py +++ b/ldclient/event_processor.py @@ -224,7 +224,7 @@ def __init__(self, capacity): def add_event(self, event): if len(self._events) >= self._capacity: - self._dropped_events = self._dropped_events + 1 + self._dropped_events += 1 if not self._exceeded_capacity: log.warning("Exceeded event queue capacity. 
Increase capacity to avoid dropping events.") self._exceeded_capacity = True @@ -236,9 +236,9 @@ def add_to_summary(self, event): self._summarizer.summarize_event(event) def get_and_clear_dropped_count(self): - ret = self._dropped_events + dropped_count = self._dropped_events self._dropped_events = 0 - return ret + return dropped_count def get_payload(self): return FlushPayload(self._events, self._summarizer.snapshot()) @@ -319,7 +319,7 @@ def _process_event(self, event): already_seen = self.notice_user(user) add_index_event = not is_index_event and not already_seen if not is_index_event and already_seen: - self._deduplicated_users = self._deduplicated_users + 1 + self._deduplicated_users += 1 if add_index_event: ie = { 'kind': 'index', 'creationDate': event['creationDate'], 'user': user } From 0375f70f81e7e6c44975946f5a63fb252cc1de06 Mon Sep 17 00:00:00 2001 From: Gavin Whelan Date: Tue, 31 Dec 2019 17:37:30 +0000 Subject: [PATCH 175/190] Last of diagnostic configuration object fields. --- ldclient/diagnostics.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/ldclient/diagnostics.py b/ldclient/diagnostics.py index 62913e45..b8524e81 100644 --- a/ldclient/diagnostics.py +++ b/ldclient/diagnostics.py @@ -73,13 +73,11 @@ def _create_diagnostic_config_object(config): 'usingRelayDaemon': config.use_ldd, 'allAttributesPrivate': config.all_attributes_private, 'pollingIntervalMillis': config.poll_interval * 1000, - #'reconnectTimeMillis': check, 'userKeysCapacity': config.user_keys_capacity, 'userKeysFlushIntervalMillis': config.user_keys_flush_interval * 1000, 'inlineUsersInEvents': config.inline_users_in_events, 'diagnosticRecordingIntervalMillis': config.diagnostic_recording_interval * 1000, - #'featureStoreFactory': check, - } + 'featureStoreFactory': config.feature_store.__class__.__name__} def _create_diagnostic_sdk_object(config): return {'name': 'python-server-sdk', From 5f2ca11d95405f117e9b08b7ed59b6d076b14b4c Mon Sep 17 00:00:00 2001 From: Gavin Whelan Date: Tue, 31 Dec 2019 17:55:19 +0000 Subject: [PATCH 176/190] Fill out rest of platform fields. --- ldclient/diagnostics.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/ldclient/diagnostics.py b/ldclient/diagnostics.py index b8524e81..2c3a0437 100644 --- a/ldclient/diagnostics.py +++ b/ldclient/diagnostics.py @@ -5,6 +5,7 @@ import threading import time import uuid +import platform from ldclient.version import VERSION @@ -86,4 +87,9 @@ def _create_diagnostic_sdk_object(config): 'wrapperVersion': config.wrapper_version} def _create_diagnostic_platform_object(): - return {'name': 'python'} + return {'name': 'python', + 'osArch': platform.machine(), + 'osName': platform.system(), + 'osVersion': platform.release(), + 'pythonVersion': platform.python_version(), + 'pythonImplementation': platform.python_implementation()} From 49a4ea9f0a4cf265d9cf426d7a10aaf1edf437b0 Mon Sep 17 00:00:00 2001 From: Gavin Whelan Date: Tue, 31 Dec 2019 18:15:45 +0000 Subject: [PATCH 177/190] Cleanup and failed stream initialization tracking. 
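The streaming change in this patch makes the accounting one entry per connection attempt: the start time is stamped before connecting, a success is recorded only once a message has actually been processed, and every error path records a failure. The intended flow, as an illustrative sketch (connect and handle are hypothetical stand-ins for the EventSource iterator and message processing, not SDK functions):

    import time

    def _millis():
        return int(time.time() * 1000)

    def run_connection(accumulator, connect, handle):
        started = _millis()                        # plays the role of _es_started
        try:
            for msg in connect():
                if handle(msg):                    # first message processed OK
                    now = _millis()
                    accumulator.record_stream_init(now, now - started, False)
                    return
        except Exception:
            now = _millis()
            accumulator.record_stream_init(now, now - started, True)
            raise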
--- ldclient/diagnostics.py | 4 ---- ldclient/event_processor.py | 7 +------ ldclient/streaming.py | 9 +++++++-- 3 files changed, 8 insertions(+), 12 deletions(-) diff --git a/ldclient/diagnostics.py b/ldclient/diagnostics.py index 2c3a0437..751356dd 100644 --- a/ldclient/diagnostics.py +++ b/ldclient/diagnostics.py @@ -1,7 +1,3 @@ -#DEFAULT_CONFIG = Config.default() -#DEFAULT_BASE_URI = DEFAULT_CONFIG.base_uri -#DEFAULT_EVENTS_URI = DEFAULT_CONFIG.events_uri -#DEFAULT_STREAM_BASE_URI = DEFAULT_CONFIG.stream_base_uri import threading import time import uuid diff --git a/ldclient/event_processor.py b/ldclient/event_processor.py index 2045d5de..a89be59b 100644 --- a/ldclient/event_processor.py +++ b/ldclient/event_processor.py @@ -185,12 +185,7 @@ def __init__(self, http, config, event_body, response_fn): self._response_fn = response_fn def run_thread(self): - try: - Thread(target = self._do_send()).start() - except Exception: - log.warning( - 'Unhandled exception in event processor. Analytics events were not processed.', - exc_info=True) + Thread(target = self._do_send).start() def _do_send(self): # noinspection PyBroadException diff --git a/ldclient/streaming.py b/ldclient/streaming.py index 2016e2d3..c159571a 100644 --- a/ldclient/streaming.py +++ b/ldclient/streaming.py @@ -60,19 +60,24 @@ def run(self): if not self._running: break message_ok = self.process_message(self._store, self._requester, msg) - self._record_stream_init(False) - self._es_started = None + if message_ok: + self._record_stream_init(False) + self._es_started = None if message_ok is True and self._ready.is_set() is False: log.info("StreamingUpdateProcessor initialized ok.") self._ready.set() except UnsuccessfulResponseException as e: log.error(http_error_message(e.status, "stream connection")) + self._record_stream_init(True) + self._es_started = None if not is_http_error_recoverable(e.status): self._ready.set() # if client is initializing, make it stop waiting; has no effect if already inited self.stop() break except Exception as e: log.warning("Caught exception. Restarting stream connection after one second. %s" % e) + self._record_stream_init(True) + self._es_started = None # no stacktrace here because, for a typical connection error, it'll just be a lengthy tour of urllib3 internals time.sleep(1) From 6f9ca76855e162e7fc96d98ef09614738f800c08 Mon Sep 17 00:00:00 2001 From: Gavin Whelan Date: Tue, 31 Dec 2019 18:44:04 +0000 Subject: [PATCH 178/190] Add diagnostic config option test. 
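These tests pin down the clamping added to Config earlier in the series (the max(diagnostic_recording_interval, 60) assignment in config.py). In usage terms, a short sketch (any real SDK key would do):

    from ldclient.config import Config

    # Values at or above the 60-second floor are kept as given...
    assert Config(sdk_key='SDK_KEY', diagnostic_recording_interval=61).diagnostic_recording_interval == 61
    # ...while anything lower is clamped up to the minimum of 60.
    assert Config(sdk_key='SDK_KEY', diagnostic_recording_interval=59).diagnostic_recording_interval == 60
    # Diagnostic events can also be disabled outright:
    no_diagnostics = Config(sdk_key='SDK_KEY', diagnostic_opt_out=True)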
--- testing/test_config.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/testing/test_config.py b/testing/test_config.py index a5f5e065..88add31c 100644 --- a/testing/test_config.py +++ b/testing/test_config.py @@ -15,9 +15,17 @@ def test_copy_config(): assert new_config.stream is False def test_can_set_valid_poll_interval(): - config = Config(sdk_key = "SDK_KEY", poll_interval = 31) - assert config.poll_interval == 31 + config = Config(sdk_key = "SDK_KEY", poll_interval = 31) + assert config.poll_interval == 31 def test_minimum_poll_interval_is_enforced(): - config = Config(sdk_key = "SDK_KEY", poll_interval = 29) - assert config.poll_interval == 30 + config = Config(sdk_key = "SDK_KEY", poll_interval = 29) + assert config.poll_interval == 30 + +def test_can_set_valid_diagnostic_interval(): + config = Config(sdk_key = "SDK_KEY", diagnostic_recording_interval=61) + assert config.diagnostic_recording_interval == 61 + +def test_minimum_diagnostic_interval_is_enforced(): + config = Config(sdk_key = "SDK_KEY", diagnostic_recording_interval=59) + assert config.diagnostic_recording_interval == 60 From aa703fb2ca73c21538e22db569b4f3f9fbd7d54c Mon Sep 17 00:00:00 2001 From: Gavin Whelan Date: Tue, 31 Dec 2019 19:08:42 +0000 Subject: [PATCH 179/190] Add tests for diagnostics.py --- ldclient/diagnostics.py | 7 +++- testing/test_diagnostics.py | 76 +++++++++++++++++++++++++++++++++++++ 2 files changed, 82 insertions(+), 1 deletion(-) create mode 100644 testing/test_diagnostics.py diff --git a/ldclient/diagnostics.py b/ldclient/diagnostics.py index 751356dd..4ce593b2 100644 --- a/ldclient/diagnostics.py +++ b/ldclient/diagnostics.py @@ -1,3 +1,8 @@ +""" +Implementation details of the diagnostic event generation. +""" +# currently excluded from documentation - see docs/README.md + import threading import time import uuid @@ -32,7 +37,7 @@ def create_event_and_reset(self, dropped_events, deduplicated_users): current_time = int(time.time() * 1000) periodic_event = _diagnostic_base_fields('diagnostic', current_time, self.diagnostic_id) - periodic_event.update({'dataSincedate': self.data_since_date, + periodic_event.update({'dataSinceDate': self.data_since_date, 'droppedEvents': dropped_events, 'deduplicatedUsers': deduplicated_users, 'eventsInLastBatch': events_in_batch, diff --git a/testing/test_diagnostics.py b/testing/test_diagnostics.py new file mode 100644 index 00000000..77c49a2e --- /dev/null +++ b/testing/test_diagnostics.py @@ -0,0 +1,76 @@ +import json +import uuid + +from ldclient.config import Config +from ldclient.diagnostics import create_diagnostic_id, create_diagnostic_init, _DiagnosticAccumulator + +def test_create_diagnostic_id(): + test_config = Config(sdk_key = "SDK_KEY") + diag_id = create_diagnostic_id(test_config); + assert len(diag_id) == 2 + uid = diag_id['diagnosticId'] + # Will throw if invalid UUID4 + uuid.UUID('urn:uuid:' + uid) + assert diag_id['sdkKeySuffix'] == 'DK_KEY' + +def test_create_diagnostic_init(): + test_config = Config(sdk_key = "SDK_KEY", wrapper_name='django', wrapper_version = '5.1.1') + diag_id = create_diagnostic_id(test_config); + diag_init = create_diagnostic_init(100, diag_id, test_config) + assert len(diag_init) == 6 + assert diag_init['kind'] == 'diagnostic-init' + assert diag_init['id'] == diag_id + assert diag_init['creationDate'] == 100 + assert diag_init['sdk'] + assert diag_init['platform'] + assert diag_init['configuration'] + + # Verify converts to json without failure + json.dumps(diag_init) + +def 
test_diagnostic_accumulator(): + test_config = Config(sdk_key = "SDK_KEY") + diag_id = create_diagnostic_id(test_config); + diag_accum = _DiagnosticAccumulator(diag_id) + + # Test default periodic event + def_diag_event = diag_accum.create_event_and_reset(0, 0) + assert len(def_diag_event) == 8 + assert def_diag_event['kind'] == 'diagnostic' + assert def_diag_event['id'] == diag_id + assert def_diag_event['creationDate'] == diag_accum.data_since_date + assert def_diag_event['dataSinceDate'] + assert def_diag_event['droppedEvents'] == 0 + assert def_diag_event['deduplicatedUsers'] == 0 + assert def_diag_event['eventsInLastBatch'] == 0 + assert def_diag_event['streamInits'] == [] + + # Verify converts to json without failure + json.dumps(def_diag_event) + + # Test periodic event after recording values + diag_accum.record_stream_init(100, 100, False) + diag_accum.record_stream_init(300, 200, True) + diag_accum.record_events_in_batch(10) + diag_accum.record_events_in_batch(50) + diag_event = diag_accum.create_event_and_reset(10, 15) + assert len(diag_event) == 8 + assert diag_event['kind'] == 'diagnostic' + assert diag_event['id'] == diag_id + assert diag_event['creationDate'] == diag_accum.data_since_date + assert diag_event['dataSinceDate'] == def_diag_event['creationDate'] + assert diag_event['droppedEvents'] == 10 + assert diag_event['deduplicatedUsers'] == 15 + assert diag_event['eventsInLastBatch'] == 50 + assert diag_event['streamInits'] == [{'timestamp': 100, 'durationMillis': 100, 'failed': False}, + {'timestamp': 300, 'durationMillis': 200, 'failed': True}] + json.dumps(diag_event) + + reset_diag_event = diag_accum.create_event_and_reset(0, 0) + assert reset_diag_event['creationDate'] == diag_accum.data_since_date + assert reset_diag_event['dataSinceDate'] == diag_event['creationDate'] + del reset_diag_event['creationDate'] + del def_diag_event['creationDate'] + del reset_diag_event['dataSinceDate'] + del def_diag_event['dataSinceDate'] + assert reset_diag_event == def_diag_event From 18d73407ecb9cce40e3263d0728099fdc9a4cef0 Mon Sep 17 00:00:00 2001 From: Gavin Whelan Date: Tue, 31 Dec 2019 19:34:57 +0000 Subject: [PATCH 180/190] Testing rest of diagnostic fields. 
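For reference, the overall shape the assertions below pin down, reconstructed from create_diagnostic_init and its helpers (every value here is a placeholder for illustration, not real output):

    diagnostic_init_example = {
        'kind': 'diagnostic-init',
        'creationDate': 100,                                  # epoch milliseconds
        'id': {'diagnosticId': '<uuid4>', 'sdkKeySuffix': 'DK_KEY'},
        'sdk': {'name': 'python-server-sdk', 'version': '<VERSION>',
                'wrapperName': 'django', 'wrapperVersion': '5.1.1'},
        'platform': {'name': 'python', 'osArch': '<machine>', 'osName': '<system>',
                     'osVersion': '<release>', 'pythonVersion': '<version>',
                     'pythonImplementation': '<implementation>'},
        'configuration': {'customBaseURI': False, 'eventsCapacity': 10000},  # 17 fields in full
    }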
--- testing/test_diagnostics.py | 67 ++++++++++++++++++++++++++++++++++--- 1 file changed, 63 insertions(+), 4 deletions(-) diff --git a/testing/test_diagnostics.py b/testing/test_diagnostics.py index 77c49a2e..0a218094 100644 --- a/testing/test_diagnostics.py +++ b/testing/test_diagnostics.py @@ -2,7 +2,7 @@ import uuid from ldclient.config import Config -from ldclient.diagnostics import create_diagnostic_id, create_diagnostic_init, _DiagnosticAccumulator +from ldclient.diagnostics import create_diagnostic_id, create_diagnostic_init, _DiagnosticAccumulator, _create_diagnostic_config_object def test_create_diagnostic_id(): test_config = Config(sdk_key = "SDK_KEY") @@ -21,13 +21,72 @@ def test_create_diagnostic_init(): assert diag_init['kind'] == 'diagnostic-init' assert diag_init['id'] == diag_id assert diag_init['creationDate'] == 100 - assert diag_init['sdk'] - assert diag_init['platform'] - assert diag_init['configuration'] + + assert diag_init['sdk']['name'] == 'python-server-sdk' + assert diag_init['sdk']['version'] + assert diag_init['sdk']['wrapperName'] == 'django' + assert diag_init['sdk']['wrapperVersion'] == '5.1.1' + + assert len(diag_init['platform']) == 6 + assert diag_init['platform']['name'] == 'python' + assert all(x in diag_init['platform'].keys() for x in ['osArch', 'osName', 'osVersion', 'pythonVersion', 'pythonImplementation']) + + assert diag_init['configuration'] == _create_diagnostic_config_object(test_config) # Verify converts to json without failure json.dumps(diag_init) +def test_create_diagnostic_config_defaults(): + test_config = Config() + diag_config = _create_diagnostic_config_object(test_config) + + assert len(diag_config) == 17 + assert diag_config['customBaseURI'] is False + assert diag_config['customEventsURI'] is False + assert diag_config['customStreamURI'] is False + assert diag_config['eventsCapacity'] == 10000 + assert diag_config['connectTimeoutMillis'] == 10000 + assert diag_config['socketTimeoutMillis'] == 15000 + assert diag_config['eventsFlushIntervalMillis'] == 5000 + assert diag_config['usingProxy'] is False + assert diag_config['streamingDisabled'] is False + assert diag_config['usingRelayDaemon'] is False + assert diag_config['allAttributesPrivate'] is False + assert diag_config['pollingIntervalMillis'] == 30000 + assert diag_config['userKeysCapacity'] == 1000 + assert diag_config['userKeysFlushIntervalMillis'] == 300000 + assert diag_config['inlineUsersInEvents'] is False + assert diag_config['diagnosticRecordingIntervalMillis'] == 900000 + assert diag_config['featureStoreFactory'] == 'InMemoryFeatureStore' + +def test_create_diagnostic_config_custom(): + test_config = Config(base_uri='https://test.com', events_uri='https://test.com', + connect_timeout=1, read_timeout=1, events_max_pending=10, + flush_interval=1, stream_uri='https://test.com', + stream=False, poll_interval=60, use_ldd=True, feature_store = 5, + all_attributes_private=True, user_keys_capacity=10, user_keys_flush_interval=60, + inline_users_in_events=True, http_proxy='', diagnostic_recording_interval=60) + diag_config = _create_diagnostic_config_object(test_config) + + assert len(diag_config) == 17 + assert diag_config['customBaseURI'] is True + assert diag_config['customEventsURI'] is True + assert diag_config['customStreamURI'] is True + assert diag_config['eventsCapacity'] == 10 + assert diag_config['connectTimeoutMillis'] == 1000 + assert diag_config['socketTimeoutMillis'] == 1000 + assert diag_config['eventsFlushIntervalMillis'] == 1000 + assert 
diag_config['usingProxy'] is True + assert diag_config['streamingDisabled'] is True + assert diag_config['usingRelayDaemon'] is True + assert diag_config['allAttributesPrivate'] is True + assert diag_config['pollingIntervalMillis'] == 60000 + assert diag_config['userKeysCapacity'] == 10 + assert diag_config['userKeysFlushIntervalMillis'] == 60000 + assert diag_config['inlineUsersInEvents'] is True + assert diag_config['diagnosticRecordingIntervalMillis'] == 60000 + assert diag_config['featureStoreFactory'] == 'int' + def test_diagnostic_accumulator(): test_config = Config(sdk_key = "SDK_KEY") diag_id = create_diagnostic_id(test_config); From c6904c763991d90bb5ce99c84d856fedb62fb5bd Mon Sep 17 00:00:00 2001 From: Gavin Whelan Date: Tue, 31 Dec 2019 21:35:29 +0000 Subject: [PATCH 181/190] Test that streaming update processor records successful and unsuccessful connection attempts in the diagnostic accumulator when available. --- testing/test_streaming.py | 48 ++++++++++++++++++++++++++++++++++----- 1 file changed, 42 insertions(+), 6 deletions(-) diff --git a/testing/test_streaming.py b/testing/test_streaming.py index 0adf6738..229248a7 100644 --- a/testing/test_streaming.py +++ b/testing/test_streaming.py @@ -1,6 +1,7 @@ from threading import Event from ldclient.config import Config +from ldclient.diagnostics import _DiagnosticAccumulator from ldclient.feature_store import InMemoryFeatureStore from ldclient.streaming import StreamingUpdateProcessor from ldclient.version import VERSION @@ -8,6 +9,7 @@ fake_event = 'event:put\ndata: {"data":{"flags":{},"segments":{}}}\n\n' +response_headers = { 'Content-Type': 'text/event-stream' } # Note that our simple HTTP stub server implementation does not actually do streaming responses, so # in these tests the connection will get closed after the response, causing the streaming processor @@ -20,7 +22,7 @@ def test_uses_stream_uri(): with start_server() as server: config = Config(sdk_key = 'sdk-key', stream_uri = server.uri) - server.setup_response('/all', 200, fake_event, { 'Content-Type': 'text/event-stream' }) + server.setup_response('/all', 200, fake_event, response_headers) with StreamingUpdateProcessor(config, None, store, ready, None) as sp: sp.start() @@ -35,7 +37,7 @@ def test_sends_headers(): with start_server() as server: config = Config(sdk_key = 'sdk-key', stream_uri = server.uri) - server.setup_response('/all', 200, fake_event, { 'Content-Type': 'text/event-stream' }) + server.setup_response('/all', 200, fake_event, response_headers) with StreamingUpdateProcessor(config, None, store, ready, None) as sp: sp.start() @@ -51,7 +53,7 @@ def test_sends_wrapper_header(): with start_server() as server: config = Config(sdk_key = 'sdk-key', stream_uri = server.uri, wrapper_name = 'Flask', wrapper_version = '0.1.0') - server.setup_response('/all', 200, fake_event, { 'Content-Type': 'text/event-stream' }) + server.setup_response('/all', 200, fake_event, response_headers) with StreamingUpdateProcessor(config, None, store, ready, None) as sp: sp.start() @@ -65,7 +67,7 @@ def test_sends_wrapper_header_without_version(): with start_server() as server: config = Config(sdk_key = 'sdk-key', stream_uri = server.uri, wrapper_name = 'Flask') - server.setup_response('/all', 200, fake_event, { 'Content-Type': 'text/event-stream' }) + server.setup_response('/all', 200, fake_event, response_headers) with StreamingUpdateProcessor(config, None, store, ready, None) as sp: sp.start() @@ -97,7 +99,7 @@ def test_can_use_https_proxy_via_config(): def 
_verify_http_proxy_is_used(server, config): store = InMemoryFeatureStore() ready = Event() - server.setup_response(config.stream_base_uri + '/all', 200, fake_event, { 'Content-Type': 'text/event-stream' }) + server.setup_response(config.stream_base_uri + '/all', 200, fake_event, response_headers) with StreamingUpdateProcessor(config, None, store, ready, None) as sp: sp.start() # For an insecure proxy request, our stub server behaves enough like the real thing to satisfy the @@ -111,10 +113,44 @@ def _verify_http_proxy_is_used(server, config): def _verify_https_proxy_is_used(server, config): store = InMemoryFeatureStore() ready = Event() - server.setup_response(config.stream_base_uri + '/all', 200, fake_event, { 'Content-Type': 'text/event-stream' }) + server.setup_response(config.stream_base_uri + '/all', 200, fake_event, response_headers) with StreamingUpdateProcessor(config, None, store, ready, None) as sp: sp.start() # Our simple stub server implementation can't really do HTTPS proxying, so the request will fail, but # it can still record that it *got* the request, which proves that the request went to the proxy. req = server.await_request() assert req.method == 'CONNECT' + +def test_records_diagnostic_on_stream_init_success(): + store = InMemoryFeatureStore() + ready = Event() + with start_server() as server: + config = Config(sdk_key = 'sdk-key', stream_uri = server.uri) + server.setup_response('/all', 200, fake_event, response_headers) + diag_accum = _DiagnosticAccumulator(1) + + with StreamingUpdateProcessor(config, None, store, ready, diag_accum) as sp: + sp.start() + server.await_request() + server.await_request() + recorded_inits = diag_accum.create_event_and_reset(0, 0)['streamInits'] + + assert len(recorded_inits) == 1 + assert recorded_inits[0]['failed'] is False + +def test_records_diagnostic_on_stream_init_failure(): + store = InMemoryFeatureStore() + ready = Event() + with start_server() as server: + config = Config(sdk_key = 'sdk-key', stream_uri = server.uri) + server.setup_response('/all', 200, 'event:put\ndata: {\n\n', response_headers) + diag_accum = _DiagnosticAccumulator(1) + + with StreamingUpdateProcessor(config, None, store, ready, diag_accum) as sp: + sp.start() + server.await_request() + server.await_request() + recorded_inits = diag_accum.create_event_and_reset(0, 0)['streamInits'] + + assert len(recorded_inits) == 1 + assert recorded_inits[0]['failed'] is True From 0f9f65c7f1e90dcad1e2e6f2110cbe95cb0ca503 Mon Sep 17 00:00:00 2001 From: Gavin Whelan Date: Thu, 2 Jan 2020 18:02:52 +0000 Subject: [PATCH 182/190] Improvements to testability of event processor. 
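This patch replaces the fire-and-forget thread in DiagnosticEventSendTask with a dedicated single-worker pool, so the dispatcher's 'test_sync' message can deterministically drain pending diagnostic sends. The idea in stdlib terms, as a sketch only (the SDK itself uses its internal FixedThreadPool, not concurrent.futures):

    from concurrent.futures import ThreadPoolExecutor

    class DiagnosticSender:
        """Single-worker queue for diagnostic POSTs that tests can drain."""
        def __init__(self, post):
            self._post = post                          # callable taking an event body
            self._pool = ThreadPoolExecutor(max_workers=1)
            self._pending = []

        def execute(self, event_body):
            self._pending.append(self._pool.submit(self._post, event_body))

        def wait(self):
            # Analogous to what the 'test_sync' handler now does for diagnostics:
            # block until every queued send has completed.
            for f in self._pending:
                f.result()
            del self._pending[:]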
--- ldclient/event_processor.py | 33 ++++++++++++++++++--------------- testing/test_event_processor.py | 6 ++++++ 2 files changed, 24 insertions(+), 15 deletions(-) diff --git a/ldclient/event_processor.py b/ldclient/event_processor.py index a89be59b..69e1807e 100644 --- a/ldclient/event_processor.py +++ b/ldclient/event_processor.py @@ -178,16 +178,12 @@ def _do_send(self, output_events): class DiagnosticEventSendTask(object): - def __init__(self, http, config, event_body, response_fn): + def __init__(self, http, config, event_body): self._http = http self._config = config self._event_body = event_body - self._response_fn = response_fn - - def run_thread(self): - Thread(target = self._do_send).start() - def _do_send(self): + def run(self): # noinspection PyBroadException try: json_body = json.dumps(self._event_body) @@ -259,6 +255,13 @@ def __init__(self, inbox, config, http_client, diagnostic_accumulator=None): self._diagnostic_accumulator = diagnostic_accumulator self._flush_workers = FixedThreadPool(__MAX_FLUSH_THREADS__, "ldclient.flush") + self._diagnostic_flush_workers = FixedThreadPool(1, "ldclient.diag_flush") if not config.diagnostic_opt_out else None + if not config.diagnostic_opt_out: + init_event = create_diagnostic_init(diagnostic_accumulator.data_since_date, + diagnostic_accumulator.diagnostic_id, + config) + task = DiagnosticEventSendTask(self._http, self._config, init_event) + self._diagnostic_flush_workers.execute(task.run) self._main_thread = Thread(target=self._run_main_loop) self._main_thread.daemon = True @@ -279,6 +282,8 @@ def _run_main_loop(self): self._send_and_reset_diagnostics() elif message.type == 'test_sync': self._flush_workers.wait() + if not self._config.diagnostic_opt_out: + self._diagnostic_flush_workers.wait() message.param.set() elif message.type == 'stop': self._do_shutdown() @@ -372,11 +377,12 @@ def _handle_response(self, r): return def _send_and_reset_diagnostics(self): - if self._diagnostic_accumulator: + if not self._config.diagnostic_opt_out: dropped_event_count = self._outbox.get_and_clear_dropped_count() stats_event = self._diagnostic_accumulator.create_event_and_reset(dropped_event_count, self._deduplicated_users) self._deduplicated_users = 0 - DiagnosticEventSendTask(self._http, self._config, stats_event, None).run_thread() + task = DiagnosticEventSendTask(self._http, self._config, stats_event) + self._diagnostic_flush_workers.execute(task.run) def _do_shutdown(self): self._flush_workers.stop() @@ -393,24 +399,19 @@ def __init__(self, config, http=None, dispatcher_class=None): self._users_flush_timer = RepeatingTimer(config.user_keys_flush_interval, self._flush_users) self._flush_timer.start() self._users_flush_timer.start() - self._http = create_http_pool_manager(num_pools=1, verify_ssl=config.verify_ssl, - target_base_uri=config.events_uri, - force_proxy=config.http_proxy) if http is None else http if not config.diagnostic_opt_out: diagnostic_id = create_diagnostic_id(config) self._diagnostic_accumulator = _DiagnosticAccumulator(diagnostic_id) - init_event = create_diagnostic_init(self._diagnostic_accumulator.data_since_date, diagnostic_id, config) - DiagnosticEventSendTask(self._http, config, init_event, None).run_thread() - self._diagnostic_event_timer = RepeatingTimer(config.diagnostic_recording_interval, self._send_diagnostic) self._diagnostic_event_timer.start() else: + self._diagnostic_event_timer = None self._diagnostic_accumulator = None self._close_lock = Lock() self._closed = False - (dispatcher_class or 
EventDispatcher)(self._inbox, config, self._http, self._diagnostic_accumulator) + (dispatcher_class or EventDispatcher)(self._inbox, config, http, self._diagnostic_accumulator) def send_event(self, event): event['creationDate'] = int(time.time() * 1000) @@ -426,6 +427,8 @@ def stop(self): self._closed = True self._flush_timer.stop() self._users_flush_timer.stop() + if self._diagnostic_event_timer: + self._diagnostic_event_timer.stop() self.flush() # Note that here we are not calling _post_to_inbox, because we *do* want to wait if the inbox # is full; an orderly shutdown can't happen unless these messages are received. diff --git a/testing/test_event_processor.py b/testing/test_event_processor.py index d6641471..b6fe5024 100644 --- a/testing/test_event_processor.py +++ b/testing/test_event_processor.py @@ -449,6 +449,12 @@ def test_wrapper_header_sent_without_version(): assert mock_http.request_headers.get('X-LaunchDarkly-Wrapper') == "Flask" +def test_sdk_key_is_sent_on_diagnostic_request(): + with DefaultTestProcessor(sdk_key = 'SDK_KEY', diagnostic_opt_out=False) as ep: + ep._wait_until_inactive() + + assert mock_http.request_headers.get('Authorization') == 'SDK_KEY' + def test_no_more_payloads_are_sent_after_401_error(): verify_unrecoverable_http_error(401) From 689b231752f8a62ba13432735fa8a2df7f6af477 Mon Sep 17 00:00:00 2001 From: Gavin Whelan Date: Thu, 2 Jan 2020 21:14:28 +0000 Subject: [PATCH 183/190] Rest of event processor tests. --- testing/test_event_processor.py | 57 ++++++++++++++++++++++++++++++++- testing/test_streaming.py | 1 - 2 files changed, 56 insertions(+), 2 deletions(-) diff --git a/testing/test_event_processor.py b/testing/test_event_processor.py index b6fe5024..b015433b 100644 --- a/testing/test_event_processor.py +++ b/testing/test_event_processor.py @@ -449,12 +449,67 @@ def test_wrapper_header_sent_without_version(): assert mock_http.request_headers.get('X-LaunchDarkly-Wrapper') == "Flask" +def test_event_schema_set_on_event_send(): + with DefaultTestProcessor() as ep: + ep.send_event({ 'kind': 'identify', 'user': user }) + ep.flush() + ep._wait_until_inactive() + + assert mock_http.request_headers.get('X-LaunchDarkly-Event-Schema') == "3" + def test_sdk_key_is_sent_on_diagnostic_request(): with DefaultTestProcessor(sdk_key = 'SDK_KEY', diagnostic_opt_out=False) as ep: ep._wait_until_inactive() - assert mock_http.request_headers.get('Authorization') == 'SDK_KEY' +def test_event_schema_not_set_on_diagnostic_send(): + with DefaultTestProcessor(diagnostic_opt_out=False) as ep: + ep._wait_until_inactive() + assert mock_http.request_headers.get('X-LaunchDarkly-Event-Schema') is None + +def test_init_diagnostic_event_sent(): + with DefaultTestProcessor(diagnostic_opt_out=False) as ep: + diag_init = flush_and_get_events(ep) + # Fields are tested in test_diagnostics.py + assert len(diag_init) == 6 + assert diag_init['kind'] == 'diagnostic-init' + +def test_periodic_diagnostic_includes_events_in_batch(): + with DefaultTestProcessor(diagnostic_opt_out=False) as ep: + # Ignore init event + flush_and_get_events(ep) + # Send a payload with a single event + ep.send_event({ 'kind': 'identify', 'user': user }) + flush_and_get_events(ep) + + ep._send_diagnostic() + diag_event = flush_and_get_events(ep) + assert len(diag_event) == 8 + assert diag_event['kind'] == 'diagnostic' + assert diag_event['eventsInLastBatch'] == 1 + assert diag_event['deduplicatedUsers'] == 0 + +def test_periodic_diagnostic_includes_deduplicated_users(): + with 
DefaultTestProcessor(diagnostic_opt_out=False) as ep: + # Ignore init event + flush_and_get_events(ep) + # Send two eval events with the same user to cause a user deduplication + e0 = { + 'kind': 'feature', 'key': 'flagkey', 'version': 11, 'user': user, + 'variation': 1, 'value': 'value', 'default': 'default', 'trackEvents': True + } + e1 = e0.copy(); + ep.send_event(e0) + ep.send_event(e1) + flush_and_get_events(ep) + + ep._send_diagnostic() + diag_event = flush_and_get_events(ep) + assert len(diag_event) == 8 + assert diag_event['kind'] == 'diagnostic' + assert diag_event['eventsInLastBatch'] == 3 + assert diag_event['deduplicatedUsers'] == 1 + def test_no_more_payloads_are_sent_after_401_error(): verify_unrecoverable_http_error(401) diff --git a/testing/test_streaming.py b/testing/test_streaming.py index 229248a7..3f6c166d 100644 --- a/testing/test_streaming.py +++ b/testing/test_streaming.py @@ -152,5 +152,4 @@ def test_records_diagnostic_on_stream_init_failure(): server.await_request() recorded_inits = diag_accum.create_event_and_reset(0, 0)['streamInits'] - assert len(recorded_inits) == 1 assert recorded_inits[0]['failed'] is True From 08740f15a8bd797084ca96d4da7b70b6d8985fe9 Mon Sep 17 00:00:00 2001 From: Gavin Whelan Date: Mon, 6 Jan 2020 23:03:20 +0000 Subject: [PATCH 184/190] Remove janky reflection. --- ldclient/client.py | 22 +++++++++++++--------- ldclient/config.py | 3 +-- ldclient/event_processor.py | 12 +++--------- testing/test_event_processor.py | 13 ++++++++----- 4 files changed, 25 insertions(+), 25 deletions(-) diff --git a/ldclient/client.py b/ldclient/client.py index c51b2b53..b235aa3a 100644 --- a/ldclient/client.py +++ b/ldclient/client.py @@ -8,6 +8,8 @@ import traceback from ldclient.config import Config as Config +from ldclient.diagnostics import create_diagnostic_id, _DiagnosticAccumulator +from ldclient.event_processor import DefaultEventProcessor from ldclient.feature_requester import FeatureRequesterImpl from ldclient.feature_store import _FeatureStoreDataSetSorter from ldclient.flag import EvaluationDetail, evaluate, error_reason @@ -103,12 +105,7 @@ def __init__(self, sdk_key=None, config=None, start_wait=5): if self._config.use_ldd: log.info("Started LaunchDarkly Client in LDD mode") - self._event_processor = self._make_event_processor(self._config) - - if callable(getattr(self._event_processor, 'retrieve_diagnostic_accumulator', None)): - diagnostic_accumulator = self._event_processor.retrieve_diagnostic_accumulator() - else: - diagnostic_accumulator = None + diagnostic_accumulator = self._set_event_processor(self._config) update_processor_ready = threading.Event() self._update_processor = self._make_update_processor(self._config, self._store, update_processor_ready, diagnostic_accumulator) @@ -124,10 +121,17 @@ def __init__(self, sdk_key=None, config=None, start_wait=5): log.warning("Initialization timeout exceeded for LaunchDarkly Client or an error occurred. 
" "Feature Flags may not yet be available.") - def _make_event_processor(self, config): + def _set_event_processor(self, config): if config.offline or not config.send_events: - return NullEventProcessor() - return config.event_processor_class(config) + self._event_processor = NullEventProcessor() + return None + if not config.event_processor_class: + diagnostic_id = create_diagnostic_id(config) + diagnostic_accumulator = _DiagnosticAccumulator(diagnostic_id) + self._event_processor = DefaultEventProcessor(config, diagnostic_accumulator = diagnostic_accumulator) + return diagnostic_accumulator + self._event_processor = config.event_processor_class(config) + return None def _make_update_processor(self, config, store, ready, diagnostic_accumulator): if config.update_processor_class: diff --git a/ldclient/config.py b/ldclient/config.py index 8b1ee411..6fec9865 100644 --- a/ldclient/config.py +++ b/ldclient/config.py @@ -4,7 +4,6 @@ Note that the same class can also be imported from the ``ldclient.client`` submodule. """ -from ldclient.event_processor import DefaultEventProcessor from ldclient.feature_store import InMemoryFeatureStore from ldclient.util import log @@ -133,7 +132,7 @@ def __init__(self, self.__poll_interval = max(poll_interval, 30) self.__use_ldd = use_ldd self.__feature_store = InMemoryFeatureStore() if not feature_store else feature_store - self.__event_processor_class = DefaultEventProcessor if not event_processor_class else event_processor_class + self.__event_processor_class = event_processor_class self.__feature_requester_class = feature_requester_class self.__connect_timeout = connect_timeout self.__read_timeout = read_timeout diff --git a/ldclient/event_processor.py b/ldclient/event_processor.py index 69e1807e..259224dd 100644 --- a/ldclient/event_processor.py +++ b/ldclient/event_processor.py @@ -30,7 +30,7 @@ from ldclient.util import create_http_pool_manager from ldclient.util import log from ldclient.util import http_error_message, is_http_error_recoverable, stringify_attrs, throw_if_unsuccessful_response -from ldclient.diagnostics import create_diagnostic_init, create_diagnostic_id, _DiagnosticAccumulator +from ldclient.diagnostics import create_diagnostic_init __MAX_FLUSH_THREADS__ = 5 __CURRENT_EVENT_SCHEMA__ = 3 @@ -392,7 +392,7 @@ def _do_shutdown(self): class DefaultEventProcessor(EventProcessor): - def __init__(self, config, http=None, dispatcher_class=None): + def __init__(self, config, http=None, dispatcher_class=None, diagnostic_accumulator=None): self._inbox = queue.Queue(config.events_max_pending) self._inbox_full = False self._flush_timer = RepeatingTimer(config.flush_interval, self.flush) @@ -400,18 +400,15 @@ def __init__(self, config, http=None, dispatcher_class=None): self._flush_timer.start() self._users_flush_timer.start() if not config.diagnostic_opt_out: - diagnostic_id = create_diagnostic_id(config) - self._diagnostic_accumulator = _DiagnosticAccumulator(diagnostic_id) self._diagnostic_event_timer = RepeatingTimer(config.diagnostic_recording_interval, self._send_diagnostic) self._diagnostic_event_timer.start() else: self._diagnostic_event_timer = None - self._diagnostic_accumulator = None self._close_lock = Lock() self._closed = False - (dispatcher_class or EventDispatcher)(self._inbox, config, http, self._diagnostic_accumulator) + (dispatcher_class or EventDispatcher)(self._inbox, config, http, diagnostic_accumulator) def send_event(self, event): event['creationDate'] = int(time.time() * 1000) @@ -434,9 +431,6 @@ def stop(self): # is full; an 
orderly shutdown can't happen unless these messages are received. self._post_message_and_wait('stop') - def retrieve_diagnostic_accumulator(self): - return self._diagnostic_accumulator - def _post_to_inbox(self, message): try: self._inbox.put(message, block=False) diff --git a/testing/test_event_processor.py b/testing/test_event_processor.py index b015433b..ef47ceaf 100644 --- a/testing/test_event_processor.py +++ b/testing/test_event_processor.py @@ -4,6 +4,7 @@ import time from ldclient.config import Config +from ldclient.diagnostics import create_diagnostic_id, _DiagnosticAccumulator from ldclient.event_processor import DefaultEventProcessor from ldclient.util import log from testing.http_util import start_server @@ -66,7 +67,9 @@ class DefaultTestProcessor(DefaultEventProcessor): def __init__(self, **kwargs): if not 'diagnostic_opt_out' in kwargs: kwargs['diagnostic_opt_out'] = True - DefaultEventProcessor.__init__(self, Config(**kwargs), mock_http) + config = Config(**kwargs) + diagnostic_accumulator = _DiagnosticAccumulator(create_diagnostic_id(config)) + DefaultEventProcessor.__init__(self, config, mock_http, diagnostic_accumulator = diagnostic_accumulator) def test_identify_event_is_queued(): with DefaultTestProcessor() as ep: @@ -557,23 +560,23 @@ def start_consuming_events(): def test_can_use_http_proxy_via_environment_var(monkeypatch): with start_server() as server: monkeypatch.setenv('http_proxy', server.uri) - config = Config(sdk_key = 'sdk-key', events_uri = 'http://not-real') + config = Config(sdk_key = 'sdk-key', events_uri = 'http://not-real', diagnostic_opt_out = True) _verify_http_proxy_is_used(server, config) def test_can_use_https_proxy_via_environment_var(monkeypatch): with start_server() as server: monkeypatch.setenv('https_proxy', server.uri) - config = Config(sdk_key = 'sdk-key', events_uri = 'https://not-real') + config = Config(sdk_key = 'sdk-key', events_uri = 'https://not-real', diagnostic_opt_out = True) _verify_https_proxy_is_used(server, config) def test_can_use_http_proxy_via_config(): with start_server() as server: - config = Config(sdk_key = 'sdk-key', events_uri = 'http://not-real', http_proxy=server.uri) + config = Config(sdk_key = 'sdk-key', events_uri = 'http://not-real', http_proxy=server.uri, diagnostic_opt_out = True) _verify_http_proxy_is_used(server, config) def test_can_use_https_proxy_via_config(): with start_server() as server: - config = Config(sdk_key = 'sdk-key', events_uri = 'https://not-real', http_proxy=server.uri) + config = Config(sdk_key = 'sdk-key', events_uri = 'https://not-real', http_proxy=server.uri, diagnostic_opt_out = True) _verify_https_proxy_is_used(server, config) def _verify_http_proxy_is_used(server, config): From a26d4588236e905ec98d9ae09deb1d935460805f Mon Sep 17 00:00:00 2001 From: Gavin Whelan Date: Mon, 6 Jan 2020 23:13:22 +0000 Subject: [PATCH 185/190] Test change to filesource optional test requirements. --- test-filesource-optional-requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test-filesource-optional-requirements.txt b/test-filesource-optional-requirements.txt index e0a0e284..40e04279 100644 --- a/test-filesource-optional-requirements.txt +++ b/test-filesource-optional-requirements.txt @@ -1,2 +1,2 @@ -pyyaml>=3.0 +pyyaml>=3.0,<5.2 watchdog>=0.9 From ef256a58e5171fe6210cec994798d28d0356cdd5 Mon Sep 17 00:00:00 2001 From: Gavin Whelan Date: Fri, 17 Jan 2020 11:56:41 +0000 Subject: [PATCH 186/190] [ch61092] Add event payload ID on event requests. 
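Each event POST now includes an X-LaunchDarkly-Payload-ID header holding a freshly generated UUID, and requests go out with a custom urllib3 retry policy (_EventRetry: at most one retry, POST allowed, flat one-second backoff, retrying only the recoverable statuses 400, 408, and 429). Because the header is built once per payload, an automatic retry re-sends the same ID, so the receiving service can detect and drop a duplicate delivery. A hypothetical receiver-side sketch of that deduplication, for illustration only (none of these names exist in the SDK):

    seen_payload_ids = set()
    accepted_batches = []

    def accept_payload(payload_id, events):
        # A retried delivery carries the same X-LaunchDarkly-Payload-ID as the
        # original attempt, so a repeated ID means the batch was already counted.
        if payload_id in seen_payload_ids:
            return
        seen_payload_ids.add(payload_id)
        accepted_batches.append(events)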
--- ldclient/event_processor.py | 18 ++++++++++++++-- ldclient/util.py | 3 ++- test-filesource-optional-requirements.txt | 2 +- testing/stub_util.py | 25 ++++++++++++++++------ testing/test_event_processor.py | 26 +++++++++++++++++++++++ 5 files changed, 63 insertions(+), 11 deletions(-) diff --git a/ldclient/event_processor.py b/ldclient/event_processor.py index 93680c13..6e3baab2 100644 --- a/ldclient/event_processor.py +++ b/ldclient/event_processor.py @@ -11,6 +11,7 @@ import six import time import urllib3 +import uuid # noinspection PyBroadException try: @@ -26,7 +27,7 @@ from ldclient.interfaces import EventProcessor from ldclient.repeating_timer import RepeatingTimer from ldclient.util import UnsuccessfulResponseException -from ldclient.util import _headers +from ldclient.util import _headers, _retryable_statuses from ldclient.util import create_http_pool_manager from ldclient.util import log from ldclient.util import http_error_message, is_http_error_recoverable, stringify_attrs, throw_if_unsuccessful_response @@ -140,6 +141,18 @@ def _get_userkey(self, event): return str(event['user'].get('key')) +class _EventRetry(urllib3.Retry): + def __init__(self): + urllib3.Retry.__init__(self, total=1, + method_whitelist=False, # Enable retry on POST + status_forcelist=_retryable_statuses, + raise_on_status=False) + + # Override backoff time to be flat 1 second + def get_backoff_time(self): + return 1 + + class EventPayloadSendTask(object): def __init__(self, http, config, formatter, payload, response_fn): self._http = http @@ -164,12 +177,13 @@ def _do_send(self, output_events): log.debug('Sending events payload: ' + json_body) hdrs = _headers(self._config.sdk_key) hdrs['X-LaunchDarkly-Event-Schema'] = str(__CURRENT_EVENT_SCHEMA__) + hdrs['X-LaunchDarkly-Payload-ID'] = str(uuid.uuid4()) uri = self._config.events_uri r = self._http.request('POST', uri, headers=hdrs, timeout=urllib3.Timeout(connect=self._config.connect_timeout, read=self._config.read_timeout), body=json_body, - retries=1) + retries=_EventRetry()) self._response_fn(r) return r except Exception as e: diff --git a/ldclient/util.py b/ldclient/util.py index 1d059798..c19190f2 100644 --- a/ldclient/util.py +++ b/ldclient/util.py @@ -37,6 +37,7 @@ # noinspection PyUnresolvedReferences __BASE_TYPES__ = (str, float, int, bool, unicode) +_retryable_statuses = [400, 408, 429] def _headers(sdk_key): return {'Authorization': sdk_key, 'User-Agent': 'PythonClient/' + VERSION, @@ -124,7 +125,7 @@ def throw_if_unsuccessful_response(resp): def is_http_error_recoverable(status): if status >= 400 and status < 500: - return (status == 400) or (status == 408) or (status == 429) # all other 4xx besides these are unrecoverable + return status in _retryable_statuses # all other 4xx besides these are unrecoverable return True # all other errors are recoverable diff --git a/test-filesource-optional-requirements.txt b/test-filesource-optional-requirements.txt index e0a0e284..40e04279 100644 --- a/test-filesource-optional-requirements.txt +++ b/test-filesource-optional-requirements.txt @@ -1,2 +1,2 @@ -pyyaml>=3.0 +pyyaml>=3.0,<5.2 watchdog>=0.9 diff --git a/testing/stub_util.py b/testing/stub_util.py index 80e53af6..41970edf 100644 --- a/testing/stub_util.py +++ b/testing/stub_util.py @@ -53,17 +53,20 @@ def getheader(self, name): class MockHttp(object): def __init__(self): + self._recorded_requests = [] self._request_data = None self._request_headers = None + self._response_func = None self._response_status = 200 self._server_time = None def 
request(self, method, uri, headers, timeout, body, retries): - self._request_headers = headers - self._request_data = body + self._recorded_requests.append((headers, body)) resp_hdr = dict() if self._server_time is not None: resp_hdr['date'] = formatdate(self._server_time / 1000, localtime=False, usegmt=True) + if self._response_func is not None: + return self._response_func() return MockResponse(self._response_status, resp_hdr) def clear(self): @@ -71,21 +74,29 @@ def clear(self): @property def request_data(self): - return self._request_data + if len(self._recorded_requests) != 0: + return self._recorded_requests[-1][1] @property def request_headers(self): - return self._request_headers + if len(self._recorded_requests) != 0: + return self._recorded_requests[-1][0] + + @property + def recorded_requests(self): + return self._recorded_requests def set_response_status(self, status): self._response_status = status - + + def set_response_func(self, response_func): + self._response_func = response_func + def set_server_time(self, timestamp): self._server_time = timestamp def reset(self): - self._request_headers = None - self._request_data = None + self._recorded_requests = [] class MockUpdateProcessor(UpdateProcessor): def __init__(self, config, store, ready): diff --git a/testing/test_event_processor.py b/testing/test_event_processor.py index 9ef1b4f8..598038b2 100644 --- a/testing/test_event_processor.py +++ b/testing/test_event_processor.py @@ -2,6 +2,7 @@ import pytest from threading import Thread import time +import uuid from ldclient.config import Config from ldclient.event_processor import DefaultEventProcessor @@ -541,6 +542,31 @@ def verify_recoverable_http_error(status): ep._wait_until_inactive() assert mock_http.request_data is not None +def test_event_payload_id_is_sent(): + with DefaultEventProcessor(Config(sdk_key = 'SDK_KEY'), mock_http) as ep: + ep.send_event({ 'kind': 'identify', 'user': user }) + ep.flush() + ep._wait_until_inactive() + + headerVal = mock_http.request_headers.get('X-LaunchDarkly-Payload-ID') + assert headerVal is not None + # Throws on invalid UUID + uuid.UUID(headerVal) + +def test_event_payload_id_changes_between_requests(): + with DefaultEventProcessor(Config(sdk_key = 'SDK_KEY'), mock_http) as ep: + ep.send_event({ 'kind': 'identify', 'user': user }) + ep.flush() + ep._wait_until_inactive() + + ep.send_event({ 'kind': 'identify', 'user': user }) + ep.flush() + ep._wait_until_inactive() + + firstPayloadId = mock_http.recorded_requests[0][0].get('X-LaunchDarkly-Payload-ID') + secondPayloadId = mock_http.recorded_requests[1][0].get('X-LaunchDarkly-Payload-ID') + assert firstPayloadId != secondPayloadId + def flush_and_get_events(ep): ep.flush() ep._wait_until_inactive() From 3a525e32945f4dee699d5670578997f0bcc42b1e Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 10 Feb 2020 18:10:14 -0800 Subject: [PATCH 187/190] normalize data store type and OS name in diagnostic events --- ldclient/diagnostics.py | 17 +++++++++++++++-- ldclient/feature_store.py | 7 +++++-- ldclient/feature_store_helpers.py | 9 +++++++-- .../integrations/consul/consul_feature_store.py | 7 +++++-- .../dynamodb/dynamodb_feature_store.py | 5 ++++- .../integrations/redis/redis_feature_store.py | 7 +++++-- ldclient/interfaces.py | 16 ++++++++++++++++ testing/test_diagnostics.py | 13 ++++++++++--- 8 files changed, 67 insertions(+), 14 deletions(-) diff --git a/ldclient/diagnostics.py b/ldclient/diagnostics.py index 4ce593b2..2890ca3a 100644 --- a/ldclient/diagnostics.py +++ 
b/ldclient/diagnostics.py @@ -79,7 +79,7 @@ def _create_diagnostic_config_object(config): 'userKeysFlushIntervalMillis': config.user_keys_flush_interval * 1000, 'inlineUsersInEvents': config.inline_users_in_events, 'diagnosticRecordingIntervalMillis': config.diagnostic_recording_interval * 1000, - 'featureStoreFactory': config.feature_store.__class__.__name__} + 'dataStoreType': _get_component_type_name(config.feature_store, config, 'memory')} def _create_diagnostic_sdk_object(config): return {'name': 'python-server-sdk', @@ -90,7 +90,20 @@ def _create_diagnostic_sdk_object(config): def _create_diagnostic_platform_object(): return {'name': 'python', 'osArch': platform.machine(), - 'osName': platform.system(), + 'osName': _normalize_os_name(platform.system()), 'osVersion': platform.release(), 'pythonVersion': platform.python_version(), 'pythonImplementation': platform.python_implementation()} + +def _get_component_type_name(component, config, default_name): + if component is not None: + if callable(getattr(component, 'describe_configuration', None)): + return component.describe_configuration(config) + return "custom" + return default_name + +def _normalize_os_name(name): + if name == 'Darwin': + return 'MacOS' + # Python already returns 'Linux' or 'Windows' for Linux or Windows, which is what we want + return name diff --git a/ldclient/feature_store.py b/ldclient/feature_store.py index efabe82e..501d8667 100644 --- a/ldclient/feature_store.py +++ b/ldclient/feature_store.py @@ -8,7 +8,7 @@ from collections import OrderedDict, defaultdict from ldclient.util import log -from ldclient.interfaces import FeatureStore +from ldclient.interfaces import DiagnosticDescription, FeatureStore from ldclient.rwlock import ReadWriteLock from six import iteritems @@ -75,7 +75,7 @@ def capacity(self): return self._capacity -class InMemoryFeatureStore(FeatureStore): +class InMemoryFeatureStore(FeatureStore, DiagnosticDescription): """The default feature store implementation, which holds all data in a thread-safe data structure in memory. """ @@ -163,6 +163,9 @@ def initialized(self): return self._initialized finally: self._lock.runlock() + + def describe_configuration(self, config): + return 'memory' class _FeatureStoreDataSetSorter: diff --git a/ldclient/feature_store_helpers.py b/ldclient/feature_store_helpers.py index 58f9a848..0f371f7b 100644 --- a/ldclient/feature_store_helpers.py +++ b/ldclient/feature_store_helpers.py @@ -4,10 +4,10 @@ from expiringdict import ExpiringDict -from ldclient.interfaces import FeatureStore +from ldclient.interfaces import DiagnosticDescription, FeatureStore -class CachingStoreWrapper(FeatureStore): +class CachingStoreWrapper(DiagnosticDescription, FeatureStore): """A partial implementation of :class:`ldclient.interfaces.FeatureStore`. 
This class delegates the basic functionality to an implementation of @@ -100,6 +100,11 @@ def initialized(self): self._inited = True return result + def describe_configuration(self, config): + if callable(getattr(self._core, 'describe_configuration', None)): + return self._core.describe_configuration(config) + return "custom" + @staticmethod def _item_cache_key(kind, key): return "{0}:{1}".format(kind.namespace, key) diff --git a/ldclient/impl/integrations/consul/consul_feature_store.py b/ldclient/impl/integrations/consul/consul_feature_store.py index 6fc8652e..497828a3 100644 --- a/ldclient/impl/integrations/consul/consul_feature_store.py +++ b/ldclient/impl/integrations/consul/consul_feature_store.py @@ -10,7 +10,7 @@ from ldclient import log from ldclient.feature_store import CacheConfig from ldclient.feature_store_helpers import CachingStoreWrapper -from ldclient.interfaces import FeatureStore, FeatureStoreCore +from ldclient.interfaces import DiagnosticDescription, FeatureStore, FeatureStoreCore # # Internal implementation of the Consul feature store. @@ -33,7 +33,7 @@ # process that did the Init will also receive the new data shortly and do its own Upsert. # -class _ConsulFeatureStoreCore(FeatureStoreCore): +class _ConsulFeatureStoreCore(DiagnosticDescription, FeatureStoreCore): def __init__(self, host, port, prefix, consul_opts): if not have_consul: raise NotImplementedError("Cannot use Consul feature store because the python-consul package is not installed") @@ -115,6 +115,9 @@ def initialized_internal(self): index, resp = self._client.kv.get(self._inited_key()) return (resp is not None) + def describe_configuration(self, config): + return 'Consul' + def _kind_key(self, kind): return self._prefix + kind.namespace diff --git a/ldclient/impl/integrations/dynamodb/dynamodb_feature_store.py b/ldclient/impl/integrations/dynamodb/dynamodb_feature_store.py index 23ca3fce..79842ef6 100644 --- a/ldclient/impl/integrations/dynamodb/dynamodb_feature_store.py +++ b/ldclient/impl/integrations/dynamodb/dynamodb_feature_store.py @@ -10,7 +10,7 @@ from ldclient import log from ldclient.feature_store import CacheConfig from ldclient.feature_store_helpers import CachingStoreWrapper -from ldclient.interfaces import FeatureStore, FeatureStoreCore +from ldclient.interfaces import DiagnosticDescription, FeatureStore, FeatureStoreCore # # Internal implementation of the DynamoDB feature store. 
@@ -120,6 +120,9 @@ def initialized_internal(self): resp = self._get_item_by_keys(self._inited_key(), self._inited_key()) return resp.get('Item') is not None and len(resp['Item']) > 0 + def describe_configuration(self, config): + return 'DynamoDB' + def _prefixed_namespace(self, base): return base if self._prefix is None else (self._prefix + ':' + base) diff --git a/ldclient/impl/integrations/redis/redis_feature_store.py b/ldclient/impl/integrations/redis/redis_feature_store.py index a23c2d66..eebe205d 100644 --- a/ldclient/impl/integrations/redis/redis_feature_store.py +++ b/ldclient/impl/integrations/redis/redis_feature_store.py @@ -8,11 +8,11 @@ pass from ldclient import log -from ldclient.interfaces import FeatureStoreCore +from ldclient.interfaces import DiagnosticDescription, FeatureStoreCore from ldclient.versioned_data_kind import FEATURES -class _RedisFeatureStoreCore(FeatureStoreCore): +class _RedisFeatureStoreCore(DiagnosticDescription, FeatureStoreCore): def __init__(self, url, prefix, max_connections): if not have_redis: raise NotImplementedError("Cannot use Redis feature store because redis package is not installed") @@ -96,6 +96,9 @@ def initialized_internal(self): r = redis.Redis(connection_pool=self._pool) return r.exists(self._items_key(FEATURES)) + def describe_configuration(self, config): + return 'Redis' + def _before_update_transaction(self, base_key, key): # exposed for testing pass diff --git a/ldclient/interfaces.py b/ldclient/interfaces.py index 48c517b8..1a319494 100644 --- a/ldclient/interfaces.py +++ b/ldclient/interfaces.py @@ -269,3 +269,19 @@ def get_one(self, kind, key): :return: """ pass + + +class DiagnosticDescription(object): + """ + Optional interface for components to describe their own configuration. + """ + + @abstractmethod + def describe_configuration(self, config): + """ + Used internally by the SDK to inspect the configuration. 
+ :param ldclient.config.Config config: the full configuration, in case this component depends on properties outside itself + :return: a string describing the type of the component, or None + :rtype: string + """ + pass diff --git a/testing/test_diagnostics.py b/testing/test_diagnostics.py index 0a218094..8bff0055 100644 --- a/testing/test_diagnostics.py +++ b/testing/test_diagnostics.py @@ -3,6 +3,8 @@ from ldclient.config import Config from ldclient.diagnostics import create_diagnostic_id, create_diagnostic_init, _DiagnosticAccumulator, _create_diagnostic_config_object +from ldclient.feature_store import CacheConfig +from ldclient.feature_store_helpers import CachingStoreWrapper def test_create_diagnostic_id(): test_config = Config(sdk_key = "SDK_KEY") @@ -57,13 +59,14 @@ def test_create_diagnostic_config_defaults(): assert diag_config['userKeysFlushIntervalMillis'] == 300000 assert diag_config['inlineUsersInEvents'] is False assert diag_config['diagnosticRecordingIntervalMillis'] == 900000 - assert diag_config['featureStoreFactory'] == 'InMemoryFeatureStore' + assert diag_config['dataStoreType'] == 'memory' def test_create_diagnostic_config_custom(): + test_store = CachingStoreWrapper(_TestStoreForDiagnostics(), CacheConfig.default()) test_config = Config(base_uri='https://test.com', events_uri='https://test.com', connect_timeout=1, read_timeout=1, events_max_pending=10, flush_interval=1, stream_uri='https://test.com', - stream=False, poll_interval=60, use_ldd=True, feature_store = 5, + stream=False, poll_interval=60, use_ldd=True, feature_store=test_store, all_attributes_private=True, user_keys_capacity=10, user_keys_flush_interval=60, inline_users_in_events=True, http_proxy='', diagnostic_recording_interval=60) diag_config = _create_diagnostic_config_object(test_config) @@ -85,7 +88,11 @@ def test_create_diagnostic_config_custom(): assert diag_config['userKeysFlushIntervalMillis'] == 60000 assert diag_config['inlineUsersInEvents'] is True assert diag_config['diagnosticRecordingIntervalMillis'] == 60000 - assert diag_config['featureStoreFactory'] == 'int' + assert diag_config['dataStoreType'] == 'MyFavoriteStore' + +class _TestStoreForDiagnostics(object): + def describe_configuration(self, config): + return 'MyFavoriteStore' def test_diagnostic_accumulator(): test_config = Config(sdk_key = "SDK_KEY") From 425dceb377699d2db3d4234ef1174c19c9c65bae Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 10 Feb 2020 18:15:29 -0800 Subject: [PATCH 188/190] gitignore --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index c949312e..f0def2a6 100644 --- a/.gitignore +++ b/.gitignore @@ -67,3 +67,5 @@ p2venv *.iml .vagrant test-packaging-venv + +.vscode/ From 27fb9a7509f27b3174485345946d85da288527d8 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 11 Feb 2020 12:36:01 -0800 Subject: [PATCH 189/190] copyedit to diagnostic event config property comment --- ldclient/config.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/ldclient/config.py b/ldclient/config.py index 6fec9865..ae2e522c 100644 --- a/ldclient/config.py +++ b/ldclient/config.py @@ -104,11 +104,11 @@ def __init__(self, variable, this is used regardless of whether the target URI is HTTP or HTTPS (the actual LaunchDarkly service uses HTTPS, but a Relay Proxy instance could use HTTP). Setting this Config parameter will override any proxy specified by an environment variable, but only for LaunchDarkly SDK connections. 
- :param bool diagnostic_opt_out: Unless the diagnosticOptOut field is set to True, the client will send + :param bool diagnostic_opt_out: Unless this field is set to True, the client will send some diagnostics data to the LaunchDarkly servers in order to assist in the development of future SDK improvements. These diagnostics consist of an initial payload containing some details of SDK in use, - the SDK's configuration, and the platform the SDK is being run on; as well as payloads sent - periodically with information on irregular occurrences such as dropped events. + the SDK's configuration, and the platform the SDK is being run on, as well as periodic information + on irregular occurrences such as dropped events. :param int diagnostic_recording_interval: The interval in seconds at which periodic diagnostic data is sent. The default is 900 seconds (every 15 minutes) and the minimum value is 60 seconds. :param string wrapper_name: For use by wrapper libraries to set an identifying name for the wrapper From 13ddc54956046889f43927f896351d77a0c4c258 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 12 Feb 2020 12:45:00 -0800 Subject: [PATCH 190/190] fix spurious error after sending diagnostic event --- ldclient/event_processor.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/ldclient/event_processor.py b/ldclient/event_processor.py index 6bc19f7d..5c3457c1 100644 --- a/ldclient/event_processor.py +++ b/ldclient/event_processor.py @@ -209,8 +209,6 @@ def run(self): timeout=urllib3.Timeout(connect=self._config.connect_timeout, read=self._config.read_timeout), body=json_body, retries=1) - if (self._response_fn): - self._response_fn(r) except Exception as e: log.warning( 'Unhandled exception in event processor. Diagnostic event was not sent. [%s]', e)
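The removed lines were left over from before PATCH 182, when DiagnosticEventSendTask still accepted a response_fn callback. After that refactoring the attribute is never set, so the lookup raised AttributeError inside the surrounding try block, and every successful diagnostic send logged the misleading 'Diagnostic event was not sent' warning. A minimal standalone sketch of the failure mode, using stand-in names rather than SDK code:

    class Task(object):
        # Like DiagnosticEventSendTask after PATCH 182: _response_fn is never assigned.
        def run(self):
            try:
                r = 'ok'  # stands in for the already-completed HTTP send
                if (self._response_fn):  # raises AttributeError every time
                    self._response_fn(r)
            except Exception as e:
                print('Unhandled exception in event processor. '
                      'Diagnostic event was not sent. [%s]' % e)

    Task().run()  # prints the spurious warning even though the send succeeded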