From f5092af8b94a58c09ab74f7d3870229a6424776e Mon Sep 17 00:00:00 2001 From: Andrew Shannon Brown Date: Mon, 23 Jul 2018 17:09:29 -0700 Subject: [PATCH 001/182] Remove @ashanbrown from codeowners --- CODEOWNERS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CODEOWNERS b/CODEOWNERS index 44429ee1..8b137891 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1 +1 @@ -* @ashanbrown + From fd63b2b84cd7806bbbacb094b0ac3ce2502fe94f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 1 Aug 2018 17:40:01 -0700 Subject: [PATCH 002/182] log exception stacktraces at debug level --- lib/ldclient-rb/events.rb | 4 ++-- lib/ldclient-rb/ldclient.rb | 12 +++--------- lib/ldclient-rb/util.rb | 5 +++++ spec/ldclient_spec.rb | 11 ----------- 4 files changed, 10 insertions(+), 22 deletions(-) diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index 0c9a0ece..202fc235 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -142,7 +142,7 @@ def main_loop(queue, buffer, flush_workers) message.completed end rescue => e - @config.logger.warn { "[LDClient] Unexpected error in event processor: #{e.inspect}. \nTrace: #{e.backtrace}" } + Util.log_exception(@config.logger, "Unexpected error in event processor", e) end end end @@ -226,7 +226,7 @@ def trigger_flush(buffer, flush_workers) resp = EventPayloadSendTask.new.run(@sdk_key, @config, @client, payload, @formatter) handle_response(resp) if !resp.nil? rescue => e - @config.logger.warn { "[LDClient] Unexpected error in event processor: #{e.inspect}. \nTrace: #{e.backtrace}" } + Util.log_exception(@config.logger, "Unexpected error in event processor", e) end end buffer.clear if success # Reset our internal state, these events now belong to the flush worker diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 5c0e872d..3f0f6d9a 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -162,7 +162,7 @@ def variation(key, user, default) @event_processor.add_event(make_feature_event(feature, user, res[:variation], value, default)) return value rescue => exn - @config.logger.warn { "[LDClient] Error evaluating feature flag: #{exn.inspect}. \nTrace: #{exn.backtrace}" } + Util.log_exception(@config.logger, "Error evaluating feature flag", exn) @event_processor.add_event(make_feature_event(feature, user, nil, default, default)) return default end @@ -210,7 +210,7 @@ def all_flags(user) # TODO rescue if necessary Hash[features.map{ |k, f| [k, evaluate(f, user, @store, @config.logger)[:value]] }] rescue => exn - @config.logger.warn { "[LDClient] Error evaluating all flags: #{exn.inspect}. 
\nTrace: #{exn.backtrace}" } + Util.log_exception(@config.logger, "Error evaluating all flags", exn) return Hash.new end end @@ -226,12 +226,6 @@ def close @store.stop end - def log_exception(caller, exn) - error_traceback = "#{exn.inspect} #{exn}\n\t#{exn.backtrace.join("\n\t")}" - error = "[LDClient] Unexpected exception in #{caller}: #{error_traceback}" - @config.logger.error { error } - end - def sanitize_user(user) if user[:key] user[:key] = user[:key].to_s @@ -252,7 +246,7 @@ def make_feature_event(flag, user, variation, value, default) } end - private :evaluate, :log_exception, :sanitize_user, :make_feature_event + private :evaluate, :sanitize_user, :make_feature_event end # diff --git a/lib/ldclient-rb/util.rb b/lib/ldclient-rb/util.rb index 6ba70dbc..99ee2477 100644 --- a/lib/ldclient-rb/util.rb +++ b/lib/ldclient-rb/util.rb @@ -1,6 +1,11 @@ module LaunchDarkly module Util + def self.log_exception(logger, message, exc) + logger.warn { "[LDClient] #{message}: #{exc.inspect}" } + logger.debug { "[LDClient] Exception trace: #{exc.backtrace}" } + end + def self.http_error_recoverable?(status) if status >= 400 && status < 500 status == 400 || status == 408 || status == 429 diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 8e4b5eb5..68c57166 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -130,17 +130,6 @@ def event_processor end end - describe '#log_exception' do - it "log error data" do - expect(client.instance_variable_get(:@config).logger).to receive(:error) - begin - raise StandardError.new 'asdf' - rescue StandardError => exn - client.send(:log_exception, 'caller', exn) - end - end - end - describe 'with send_events: false' do let(:config) { LaunchDarkly::Config.new({offline: true, send_events: false, update_processor: update_processor}) } let(:client) { subject.new("secret", config) } From d4be186ed2026056dd9768fd2b265181f9353c72 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 1 Aug 2018 17:48:15 -0700 Subject: [PATCH 003/182] re-add minimal unit test --- spec/util_spec.rb | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 spec/util_spec.rb diff --git a/spec/util_spec.rb b/spec/util_spec.rb new file mode 100644 index 00000000..25881aaa --- /dev/null +++ b/spec/util_spec.rb @@ -0,0 +1,17 @@ +require "spec_helper" + +describe LaunchDarkly::Util do + describe 'log_exception' do + let(:logger) { double() } + + it "logs error data" do + expect(logger).to receive(:warn) + expect(logger).to receive(:debug) + begin + raise StandardError.new 'asdf' + rescue StandardError => exn + LaunchDarkly::Util.log_exception(logger, "message", exn) + end + end + end +end From d73d66c19c03511905aa9eef827bb656b19791be Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 1 Aug 2018 17:51:32 -0700 Subject: [PATCH 004/182] log exceptions at error level --- lib/ldclient-rb/util.rb | 2 +- spec/util_spec.rb | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/ldclient-rb/util.rb b/lib/ldclient-rb/util.rb index 99ee2477..707ba3ce 100644 --- a/lib/ldclient-rb/util.rb +++ b/lib/ldclient-rb/util.rb @@ -2,7 +2,7 @@ module LaunchDarkly module Util def self.log_exception(logger, message, exc) - logger.warn { "[LDClient] #{message}: #{exc.inspect}" } + logger.error { "[LDClient] #{message}: #{exc.inspect}" } logger.debug { "[LDClient] Exception trace: #{exc.backtrace}" } end diff --git a/spec/util_spec.rb b/spec/util_spec.rb index 25881aaa..50a72f76 100644 --- a/spec/util_spec.rb +++ b/spec/util_spec.rb @@ -5,7 +5,7 @@ let(:logger) { 
double() } it "logs error data" do - expect(logger).to receive(:warn) + expect(logger).to receive(:error) expect(logger).to receive(:debug) begin raise StandardError.new 'asdf' From ca15234e9214701061528d3ce702c20d34d3a9a9 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 17 Aug 2018 16:30:19 -0700 Subject: [PATCH 005/182] add new version of all_flags that captures more metadata --- lib/ldclient-rb.rb | 1 + lib/ldclient-rb/flags_state.rb | 51 +++++++++++++++++++ lib/ldclient-rb/ldclient.rb | 44 ++++++++++++---- spec/ldclient_spec.rb | 91 ++++++++++++++++++++++++++++++++++ 4 files changed, 176 insertions(+), 11 deletions(-) create mode 100644 lib/ldclient-rb/flags_state.rb diff --git a/lib/ldclient-rb.rb b/lib/ldclient-rb.rb index ce9d0307..7264b220 100644 --- a/lib/ldclient-rb.rb +++ b/lib/ldclient-rb.rb @@ -1,6 +1,7 @@ require "ldclient-rb/version" require "ldclient-rb/util" require "ldclient-rb/evaluation" +require "ldclient-rb/flags_state" require "ldclient-rb/ldclient" require "ldclient-rb/cache_store" require "ldclient-rb/expiring_cache" diff --git a/lib/ldclient-rb/flags_state.rb b/lib/ldclient-rb/flags_state.rb new file mode 100644 index 00000000..f68dc20b --- /dev/null +++ b/lib/ldclient-rb/flags_state.rb @@ -0,0 +1,51 @@ + +module LaunchDarkly + # + # A snapshot of the state of all feature flags with regard to a specific user, generated by + # calling the client's all_flags_state method. + # + class FeatureFlagsState + def initialize(valid) + @flag_values = {} + @flag_metadata = {} + @valid = valid + end + + # Used internally to build the state map. + def add_flag(flag, value, variation) + key = flag[:key] + @flag_values[key] = value + meta = { version: flag[:version], trackEvents: flag[:trackEvents] } + meta[:variation] = variation if !variation.nil? + meta[:debugEventsUntilDate] = flag[:debugEventsUntilDate] if flag[:debugEventsUntilDate] + @flag_metadata[key] = meta + end + + # Returns true if this object contains a valid snapshot of feature flag state, or false if the + # state could not be computed (for instance, because the client was offline or there was no user). + def valid? + @valid + end + + # Returns the value of an individual feature flag at the time the state was recorded. + # Returns nil if the flag returned the default value, or if there was no such flag. + def flag_value(key) + @flag_values[key] + end + + # Returns a map of flag keys to flag values. If a flag would have evaluated to the default value, + # its value will be nil. + def values_map + @flag_values + end + + # Returns a JSON string representation of the entire state map, in the format used by the + # LaunchDarkly JavaScript SDK. Use this method if you are passing data to the front end that + # will be used to "bootstrap" the JavaScript client. + def json_string + ret = @flag_values.clone + ret['$flagsState'] = @flag_metadata + ret.to_json + end + end +end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 3f0f6d9a..5c64b7e7 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -193,26 +193,48 @@ def track(event_name, user, data) end # - # Returns all feature flag values for the given user + # Returns all feature flag values for the given user. This method is deprecated - please use + # all_flags_state instead. Current versions of the client-side SDK (2.0.0 and later) will not + # generate analytics events correctly if you pass the result of all_flags. # def all_flags(user) - sanitize_user(user) - return Hash.new if @config.offline? 
+ all_flags_state(user).values_map + end - unless user - @config.logger.error { "[LDClient] Must specify user in all_flags" } - return Hash.new + # + # Returns a FeatureFlagsState object that encapsulates the state of all feature flags for a given user, + # including the flag values and also metadata that can be used on the front end. This method does not + # send analytics events back to LaunchDarkly. + # + def all_flags_state(user) + return FeatureFlagsState.new(false) if @config.offline? + + unless user && !user[:key].nil? + @config.logger.error { "[LDClient] User and user key must be specified in all_flags_state" } + return FeatureFlagsState.new(false) end + sanitize_user(user) + begin features = @store.all(FEATURES) - - # TODO rescue if necessary - Hash[features.map{ |k, f| [k, evaluate(f, user, @store, @config.logger)[:value]] }] rescue => exn - Util.log_exception(@config.logger, "Error evaluating all flags", exn) - return Hash.new + Util.log_exception(@config.logger, "Unable to read flags for all_flags_state", exn) + return FeatureFlagsState.new(false) + end + + state = FeatureFlagsState.new(true) + features.each do |k, f| + begin + result = evaluate(f, user, @store, @config.logger) + state.add_flag(f, result[:value], result[:variation]) + rescue => exn + Util.log_exception(@config.logger, "Error evaluating flag \"#{k}\" in all_flags_state", exn) + state.add_flag(f, nil, nil) + end end + + state end # diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 68c57166..9d13dee0 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -99,6 +99,97 @@ def event_processor end end + describe '#all_flags' do + let(:flag1) { { key: "key1", offVariation: 0, variations: [ 'value1' ] } } + let(:flag2) { { key: "key2", offVariation: 0, variations: [ 'value2' ] } } + + it "returns flag values" do + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + + result = client.all_flags({ key: 'userkey' }) + expect(result).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) + end + + it "returns empty map for nil user" do + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + + result = client.all_flags(nil) + expect(result).to eq({}) + end + + it "returns empty map for nil user key" do + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + + result = client.all_flags({}) + expect(result).to eq({}) + end + + it "returns empty map if offline" do + offline_config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + + result = offline_client.all_flags(nil) + expect(result).to eq({}) + end + end + + describe '#all_flags_state' do + let(:flag1) { { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } } + let(:flag2) { { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true, debugEventsUntilDate: 1000 } } + + it "returns flags state" do + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + + state = client.all_flags_state({ key: 'userkey' }) + expect(state.valid?).to be true + + values = state.values_map + expect(values).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) + + result = JSON.parse(state.json_string) + expect(result).to eq({ + 'key1' => 'value1', + 'key2' => 'value2', + '$flagsState' => { + 'key1' => { + 'variation' => 0, + 'version' => 100, + 'trackEvents' => false + }, + 'key2' => { + 'variation' => 
1, + 'version' => 200, + 'trackEvents' => true, + 'debugEventsUntilDate' => 1000 + } + } + }) + end + + it "returns empty state for nil user" do + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + + state = client.all_flags_state(nil) + expect(state.valid?).to be false + expect(state.values_map).to eq({}) + end + + it "returns empty state for nil user key" do + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + + state = client.all_flags_state({}) + expect(state.valid?).to be false + expect(state.values_map).to eq({}) + end + + it "returns empty state if offline" do + offline_config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + + state = offline_client.all_flags_state({ key: 'userkey' }) + expect(state.valid?).to be false + expect(state.values_map).to eq({}) + end + end + describe '#secure_mode_hash' do it "will return the expected value for a known message and secret" do result = client.secure_mode_hash({key: :Message}) From ed19523fd0d93306204929248e179945fdabf10f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 17 Aug 2018 16:37:43 -0700 Subject: [PATCH 006/182] add tests for FeatureFlagsState --- spec/flags_state_spec.rb | 56 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 56 insertions(+) create mode 100644 spec/flags_state_spec.rb diff --git a/spec/flags_state_spec.rb b/spec/flags_state_spec.rb new file mode 100644 index 00000000..9241028d --- /dev/null +++ b/spec/flags_state_spec.rb @@ -0,0 +1,56 @@ +require "spec_helper" + +describe LaunchDarkly::FeatureFlagsState do + subject { LaunchDarkly::FeatureFlagsState } + + it "can get flag value" do + state = subject.new(true) + flag = { key: 'key' } + state.add_flag(flag, 'value', 1) + + expect(state.flag_value('key')).to eq 'value' + end + + it "returns nil for unknown flag" do + state = subject.new(true) + + expect(state.flag_value('key')).to be nil + end + + it "can be converted to values map" do + state = subject.new(true) + flag1 = { key: 'key1' } + flag2 = { key: 'key2' } + state.add_flag(flag1, 'value1', 0) + state.add_flag(flag2, 'value2', 1) + + expect(state.values_map).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) + end + + it "can be converted to JSON string" do + state = subject.new(true) + flag1 = { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } + flag2 = { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true, debugEventsUntilDate: 1000 } + state.add_flag(flag1, 'value1', 0) + state.add_flag(flag2, 'value2', 1) + + result = JSON.parse(state.json_string) + expect(result).to eq({ + 'key1' => 'value1', + 'key2' => 'value2', + '$flagsState' => { + 'key1' => { + 'variation' => 0, + 'version' => 100, + 'trackEvents' => false + }, + 'key2' => { + 'variation' => 1, + 'version' => 200, + 'trackEvents' => true, + 'debugEventsUntilDate' => 1000 + } + } + }) + end +end From 73f2d892fa166b5ccf2b68f268f77c04a49462ee Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 20 Aug 2018 12:59:10 -0700 Subject: [PATCH 007/182] provide as_json method that returns a hash instead of just a string --- lib/ldclient-rb/flags_state.rb | 22 +++++++++++++++++----- lib/ldclient-rb/ldclient.rb | 4 ++-- spec/flags_state_spec.rb | 30 +++++++++++++++++++++--------- spec/ldclient_spec.rb | 16 ++++++++-------- 4 files changed, 48 insertions(+), 24 deletions(-) diff --git a/lib/ldclient-rb/flags_state.rb 
b/lib/ldclient-rb/flags_state.rb index f68dc20b..a5af6c5a 100644 --- a/lib/ldclient-rb/flags_state.rb +++ b/lib/ldclient-rb/flags_state.rb @@ -1,3 +1,4 @@ +require 'json' module LaunchDarkly # @@ -35,17 +36,28 @@ def flag_value(key) # Returns a map of flag keys to flag values. If a flag would have evaluated to the default value, # its value will be nil. + # + # Do not use this method if you are passing data to the front end to "bootstrap" the JavaScript client. + # Instead, use as_json. def values_map @flag_values end - # Returns a JSON string representation of the entire state map, in the format used by the - # LaunchDarkly JavaScript SDK. Use this method if you are passing data to the front end that - # will be used to "bootstrap" the JavaScript client. - def json_string + # Returns a hash that can be used as a JSON representation of the entire state map, in the format + # used by the LaunchDarkly JavaScript SDK. Use this method if you are passing data to the front end + # in order to "bootstrap" the JavaScript client. + # + # Do not rely on the exact shape of this data, as it may change in future to support the needs of + # the JavaScript client. + def as_json(*) # parameter is unused, but may be passed if we're using the json gem ret = @flag_values.clone ret['$flagsState'] = @flag_metadata - ret.to_json + ret + end + + # Same as as_json, but converts the JSON structure into a string. + def to_json(*a) + as_json.to_json(a) end end end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 5c64b7e7..c8addbca 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -194,8 +194,8 @@ def track(event_name, user, data) # # Returns all feature flag values for the given user. This method is deprecated - please use - # all_flags_state instead. Current versions of the client-side SDK (2.0.0 and later) will not - # generate analytics events correctly if you pass the result of all_flags. + # all_flags_state instead. Current versions of the client-side SDK will not generate analytics + # events correctly if you pass the result of all_flags. 
# def all_flags(user) all_flags_state(user).values_map diff --git a/spec/flags_state_spec.rb b/spec/flags_state_spec.rb index 9241028d..e6e1c17c 100644 --- a/spec/flags_state_spec.rb +++ b/spec/flags_state_spec.rb @@ -27,30 +27,42 @@ expect(state.values_map).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) end - it "can be converted to JSON string" do + it "can be converted to JSON structure" do state = subject.new(true) flag1 = { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } flag2 = { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true, debugEventsUntilDate: 1000 } state.add_flag(flag1, 'value1', 0) state.add_flag(flag2, 'value2', 1) - result = JSON.parse(state.json_string) + result = state.as_json expect(result).to eq({ 'key1' => 'value1', 'key2' => 'value2', '$flagsState' => { 'key1' => { - 'variation' => 0, - 'version' => 100, - 'trackEvents' => false + :variation => 0, + :version => 100, + :trackEvents => false }, 'key2' => { - 'variation' => 1, - 'version' => 200, - 'trackEvents' => true, - 'debugEventsUntilDate' => 1000 + :variation => 1, + :version => 200, + :trackEvents => true, + :debugEventsUntilDate => 1000 } } }) end + + it "can be converted to JSON string" do + state = subject.new(true) + flag1 = { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } + flag2 = { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true, debugEventsUntilDate: 1000 } + state.add_flag(flag1, 'value1', 0) + state.add_flag(flag2, 'value2', 1) + + object = state.as_json + str = state.to_json + expect(object.to_json).to eq(str) + end end diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 9d13dee0..b5939ea1 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -145,21 +145,21 @@ def event_processor values = state.values_map expect(values).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) - result = JSON.parse(state.json_string) + result = state.as_json expect(result).to eq({ 'key1' => 'value1', 'key2' => 'value2', '$flagsState' => { 'key1' => { - 'variation' => 0, - 'version' => 100, - 'trackEvents' => false + :variation => 0, + :version => 100, + :trackEvents => false }, 'key2' => { - 'variation' => 1, - 'version' => 200, - 'trackEvents' => true, - 'debugEventsUntilDate' => 1000 + :variation => 1, + :version => 200, + :trackEvents => true, + :debugEventsUntilDate => 1000 } } }) From ab896b1e801f944166c5525e6aa1d00cf333da0b Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 20 Aug 2018 20:01:32 -0700 Subject: [PATCH 008/182] state can be serialized with JSON.generate --- lib/ldclient-rb/flags_state.rb | 5 ++++- spec/flags_state_spec.rb | 16 +++++++++++++++- 2 files changed, 19 insertions(+), 2 deletions(-) diff --git a/lib/ldclient-rb/flags_state.rb b/lib/ldclient-rb/flags_state.rb index a5af6c5a..09f88975 100644 --- a/lib/ldclient-rb/flags_state.rb +++ b/lib/ldclient-rb/flags_state.rb @@ -3,7 +3,9 @@ module LaunchDarkly # # A snapshot of the state of all feature flags with regard to a specific user, generated by - # calling the client's all_flags_state method. + # calling the client's all_flags_state method. Serializing this object to JSON using + # JSON.generate (or the to_json method) will produce the appropriate data structure for + # bootstrapping the LaunchDarkly JavaScript client. 
# class FeatureFlagsState def initialize(valid) @@ -52,6 +54,7 @@ def values_map def as_json(*) # parameter is unused, but may be passed if we're using the json gem ret = @flag_values.clone ret['$flagsState'] = @flag_metadata + ret['$valid'] = @valid ret end diff --git a/spec/flags_state_spec.rb b/spec/flags_state_spec.rb index e6e1c17c..3d21029b 100644 --- a/spec/flags_state_spec.rb +++ b/spec/flags_state_spec.rb @@ -1,4 +1,5 @@ require "spec_helper" +require "json" describe LaunchDarkly::FeatureFlagsState do subject { LaunchDarkly::FeatureFlagsState } @@ -50,7 +51,8 @@ :trackEvents => true, :debugEventsUntilDate => 1000 } - } + }, + '$valid' => true }) end @@ -65,4 +67,16 @@ str = state.to_json expect(object.to_json).to eq(str) end + + it "uses our custom serializer with JSON.generate" do + state = subject.new(true) + flag1 = { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } + flag2 = { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true, debugEventsUntilDate: 1000 } + state.add_flag(flag1, 'value1', 0) + state.add_flag(flag2, 'value2', 1) + + stringFromToJson = state.to_json + stringFromGenerate = JSON.generate(state) + expect(stringFromGenerate).to eq(stringFromToJson) + end end From 00347c66ae17167910d316617e061d85f6793681 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 20 Aug 2018 20:02:41 -0700 Subject: [PATCH 009/182] add $valid --- spec/ldclient_spec.rb | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index b5939ea1..5dbb8195 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -161,7 +161,8 @@ def event_processor :trackEvents => true, :debugEventsUntilDate => 1000 } - } + }, + '$valid' => true }) end From bdac27e1cf37e2c95c4455d705a99aaa2a948b28 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 21 Aug 2018 11:46:14 -0700 Subject: [PATCH 010/182] add ability to filter for only client-side flags --- lib/ldclient-rb/ldclient.rb | 17 +++++++++++++++-- spec/ldclient_spec.rb | 16 ++++++++++++++++ 2 files changed, 31 insertions(+), 2 deletions(-) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index c8addbca..e9873679 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -194,9 +194,12 @@ def track(event_name, user, data) # # Returns all feature flag values for the given user. This method is deprecated - please use - # all_flags_state instead. Current versions of the client-side SDK will not generate analytics + # {#all_flags_state} instead. Current versions of the client-side SDK will not generate analytics # events correctly if you pass the result of all_flags. # + # @param user [Hash] The end user requesting the feature flags + # @return [Hash] a hash of feature flag keys to values + # def all_flags(user) all_flags_state(user).values_map end @@ -206,7 +209,13 @@ def all_flags(user) # including the flag values and also metadata that can be used on the front end. This method does not # send analytics events back to LaunchDarkly. # - def all_flags_state(user) + # @param user [Hash] The end user requesting the feature flags + # @param options={} [Hash] Optional parameters to control how the state is generated + # @option options [Boolean] :client_side_only (false) True if only flags marked for use with the + # client-side SDK should be included in the state. By default, all flags are included. 
+ # @return [FeatureFlagsState] a FeatureFlagsState object which can be serialized to JSON + # + def all_flags_state(user, options={}) return FeatureFlagsState.new(false) if @config.offline? unless user && !user[:key].nil? @@ -224,7 +233,11 @@ def all_flags_state(user) end state = FeatureFlagsState.new(true) + client_only = options[:client_side_only] || false features.each do |k, f| + if client_only && !f[:clientSide] + next + end begin result = evaluate(f, user, @store, @config.logger) state.add_flag(f, result[:value], result[:variation]) diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 5dbb8195..ae76a678 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -166,6 +166,22 @@ def event_processor }) end + it "can be filtered for only client-side flags" do + flag1 = { key: "server-side-1", offVariation: 0, variations: [ 'a' ], clientSide: false } + flag2 = { key: "server-side-2", offVariation: 0, variations: [ 'b' ], clientSide: false } + flag3 = { key: "client-side-1", offVariation: 0, variations: [ 'value1' ], clientSide: true } + flag4 = { key: "client-side-2", offVariation: 0, variations: [ 'value2' ], clientSide: true } + config.feature_store.init({ LaunchDarkly::FEATURES => { + flag1[:key] => flag1, flag2[:key] => flag2, flag3[:key] => flag3, flag4[:key] => flag4 + }}) + + state = client.all_flags_state({ key: 'userkey' }, client_side_only: true) + expect(state.valid?).to be true + + values = state.values_map + expect(values).to eq({ 'client-side-1' => 'value1', 'client-side-2' => 'value2' }) + end + it "returns empty state for nil user" do config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) From cee4c18aa0a6330cd3e24f6c9b11914cae57d34d Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 22 Aug 2018 19:58:42 -0700 Subject: [PATCH 011/182] implement evaluation with explanations --- lib/ldclient-rb/evaluation.rb | 185 +++++++++++++++++---------- lib/ldclient-rb/events.rb | 1 + lib/ldclient-rb/flags_state.rb | 3 +- lib/ldclient-rb/ldclient.rb | 124 ++++++++++-------- spec/evaluation_spec.rb | 221 +++++++++++++++++++-------------- spec/ldclient_spec.rb | 98 ++++++++++++--- 6 files changed, 403 insertions(+), 229 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index b4dd796c..b803f4a2 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -2,6 +2,31 @@ require "semantic" module LaunchDarkly + # An object returned by `LDClient.variation_detail`, combining the result of a flag evaluation with + # an explanation of how it was calculated. + class EvaluationDetail + def initialize(value, variation, reason) + @value = value + @variation = variation + @reason = reason + end + + # @return [Object] The result of the flag evaluation. This will be either one of the flag's + # variations or the default value that was passed to the `variation` method. + attr_reader :value + + # @return [int|nil] The index of the returned value within the flag's list of variations, e.g. + # 0 for the first variation - or `nil` if the default value was returned. + attr_reader :variation + + # @return [Hash] An object describing the main factor that influenced the flag evaluation value. 
+ attr_reader :reason + + def ==(other) + @value == other.value && @variation == other.variation && @reason == other.reason + end + end + module Evaluation BUILTINS = [:key, :ip, :country, :email, :firstName, :lastName, :avatar, :name, :anonymous] @@ -110,101 +135,109 @@ def self.comparator(converter) class EvaluationError < StandardError end - # Evaluates a feature flag, returning a hash containing the evaluation result and any events - # generated during prerequisite evaluation. Raises EvaluationError if the flag is not well-formed - # Will return nil, but not raise an exception, indicating that the rules (including fallthrough) did not match - # In that case, the caller should return the default value. - def evaluate(flag, user, store, logger) - if flag.nil? - raise EvaluationError, "Flag does not exist" - end + # Used internally to hold an evaluation result and the events that were generated from prerequisites. + EvalResult = Struct.new(:detail, :events) + + def error_result(errorKind, value = nil) + EvaluationDetail.new(value, nil, { kind: 'ERROR', errorKind: errorKind }) + end + # Evaluates a feature flag and returns an EvalResult. The result.value will be nil if the flag returns + # the default value. Error conditions produce a result with an error reason, not an exception. + def evaluate(flag, user, store, logger) if user.nil? || user[:key].nil? - raise EvaluationError, "Invalid user" + return EvalResult.new(error_result('USER_NOT_SPECIFIED'), []) end events = [] if flag[:on] - res = eval_internal(flag, user, store, events, logger) - if !res.nil? - res[:events] = events - return res + detail = eval_internal(flag, user, store, events, logger) + return EvalResult.new(detail, events) + end + + return EvalResult.new(get_off_value(flag, { kind: 'OFF' }), events) + end + + + def eval_internal(flag, user, store, events, logger) + prereq_failure_reason = check_prerequisites(flag, user, store, events, logger) + if !prereq_failure_reason.nil? + return get_off_value(flag, prereq_failure_reason) + end + + # Check user target matches + (flag[:targets] || []).each do |target| + (target[:values] || []).each do |value| + if value == user[:key] + return get_variation(flag, target[:variation], { kind: 'TARGET_MATCH' }) + end + end + end + + # Check custom rules + rules = flag[:rules] || [] + rules.each_index do |i| + rule = rules[i] + if rule_match_user(rule, user, store) + return get_value_for_variation_or_rollout(flag, rule, user, + { kind: 'RULE_MATCH', ruleIndex: i, ruleId: rule[:id] }, logger) end end - offVariation = flag[:offVariation] - if !offVariation.nil? && offVariation < flag[:variations].length - value = flag[:variations][offVariation] - return { variation: offVariation, value: value, events: events } + # Check the fallthrough rule + if !flag[:fallthrough].nil? + return get_value_for_variation_or_rollout(flag, flag[:fallthrough], user, + { kind: 'FALLTHROUGH' }, logger) end - { variation: nil, value: nil, events: events } + return EvaluationDetail.new(nil, nil, { kind: 'FALLTHROUGH' }) end - def eval_internal(flag, user, store, events, logger) - failed_prereq = false - # Evaluate prerequisites, if any + def check_prerequisites(flag, user, store, events, logger) + failed_prereqs = [] + (flag[:prerequisites] || []).each do |prerequisite| - prereq_flag = store.get(FEATURES, prerequisite[:key]) + prereq_ok = true + prereq_key = prerequisite[:key] + prereq_flag = store.get(FEATURES, prereq_key) if prereq_flag.nil? 
|| !prereq_flag[:on] - failed_prereq = true + logger.error { "[LDClient] Could not retrieve prerequisite flag \"#{prereq_key}\" when evaluating \"#{flag[:key]}\"" } + prereq_ok = false + elsif !prereq_flag[:on] + prereq_ok = false else begin prereq_res = eval_internal(prereq_flag, user, store, events, logger) event = { kind: "feature", - key: prereq_flag[:key], - variation: prereq_res.nil? ? nil : prereq_res[:variation], - value: prereq_res.nil? ? nil : prereq_res[:value], + key: prereq_key, + variation: prereq_res.variation, + value: prereq_res.value, version: prereq_flag[:version], prereqOf: flag[:key], trackEvents: prereq_flag[:trackEvents], debugEventsUntilDate: prereq_flag[:debugEventsUntilDate] } events.push(event) - if prereq_res.nil? || prereq_res[:variation] != prerequisite[:variation] - failed_prereq = true + if prereq_res.variation != prerequisite[:variation] + prereq_ok = false end rescue => exn - logger.error { "[LDClient] Error evaluating prerequisite: #{exn.inspect}" } - failed_prereq = true + Util.log_exception(logger, "Error evaluating prerequisite flag \"#{prereq_key}\" for flag \"{flag[:key]}\"", exn) + prereq_ok = false end end - end - - if failed_prereq - return nil - end - # The prerequisites were satisfied. - # Now walk through the evaluation steps and get the correct - # variation index - eval_rules(flag, user, store) - end - - def eval_rules(flag, user, store) - # Check user target matches - (flag[:targets] || []).each do |target| - (target[:values] || []).each do |value| - if value == user[:key] - return { variation: target[:variation], value: get_variation(flag, target[:variation]) } - end + if !prereq_ok + failed_prereqs.push(prereq_key) end end - - # Check custom rules - (flag[:rules] || []).each do |rule| - return variation_for_user(rule, user, flag) if rule_match_user(rule, user, store) - end - # Check the fallthrough rule - if !flag[:fallthrough].nil? - return variation_for_user(flag[:fallthrough], user, flag) + if failed_prereqs.empty? + return nil end - - # Not even the fallthrough matched-- return the off variation or default - nil + { kind: 'PREREQUISITES_FAILED', prerequisiteKeys: failed_prereqs } end def get_variation(flag, index) @@ -257,9 +290,9 @@ def clause_match_user_no_segments(clause, user) maybe_negate(clause, match_any(op, val, clause[:values])) end - def variation_for_user(rule, user, flag) + def variation_index_for_user(flag, rule, user) if !rule[:variation].nil? # fixed variation - return { variation: rule[:variation], value: get_variation(flag, rule[:variation]) } + return rule[:variation] elsif !rule[:rollout].nil? # percentage rollout rollout = rule[:rollout] bucket_by = rollout[:bucketBy].nil? ? 
"key" : rollout[:bucketBy] @@ -268,12 +301,12 @@ def variation_for_user(rule, user, flag) rollout[:variations].each do |variate| sum += variate[:weight].to_f / 100000.0 if bucket < sum - return { variation: variate[:variation], value: get_variation(flag, variate[:variation]) } + return variate[:variation] end end nil else # the rule isn't well-formed - raise EvaluationError, "Rule does not define a variation or rollout" + nil end end @@ -350,5 +383,31 @@ def match_any(op, value, values) end return false end + + :private + + def get_variation(flag, index, reason) + if index < 0 || index >= flag[:variations].length + logger.error("[LDClient] Data inconsistency in feature flag \"#{flag[:key]}\": invalid variation index") + return error_result('MALFORMED_FLAG') + end + EvaluationDetail.new(flag[:variations][index], index, reason) + end + + def get_off_value(flag, reason) + if flag[:offVariation].nil? # off variation unspecified - return default value + return EvaluationDetail.new(nil, nil, reason) + end + get_variation(flag, flag[:offVariation], reason) + end + + def get_value_for_variation_or_rollout(flag, vr, user, reason, logger) + index = variation_index_for_user(flag, vr, user) + if index.nil? + logger.error("[LDClient] Data inconsistency in feature flag \"#{flag[:key]}\": variation/rollout object with no variation or rollout") + return error_result('MALFORMED_FLAG') + end + return get_variation(flag, index, reason) + end end end diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index 202fc235..e19d6b02 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -363,6 +363,7 @@ def make_output_event(event) else out[:userKey] = event[:user].nil? ? nil : event[:user][:key] end + out[:reason] = event[:reason] if !event[:reason].nil? out when "identify" { diff --git a/lib/ldclient-rb/flags_state.rb b/lib/ldclient-rb/flags_state.rb index 09f88975..05079920 100644 --- a/lib/ldclient-rb/flags_state.rb +++ b/lib/ldclient-rb/flags_state.rb @@ -15,12 +15,13 @@ def initialize(valid) end # Used internally to build the state map. - def add_flag(flag, value, variation) + def add_flag(flag, value, variation, reason = nil) key = flag[:key] @flag_values[key] = value meta = { version: flag[:version], trackEvents: flag[:trackEvents] } meta[:variation] = variation if !variation.nil? meta[:debugEventsUntilDate] = flag[:debugEventsUntilDate] if flag[:debugEventsUntilDate] + meta[:reason] = reason if !reason.nil? @flag_metadata[key] = meta end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index e9873679..8efd422a 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -120,52 +120,11 @@ def initialized? # @return the variation to show the user, or the # default value if there's an an error def variation(key, user, default) - return default if @config.offline? - - if !initialized? - if @store.initialized? - @config.logger.warn { "[LDClient] Client has not finished initializing; using last known values from feature store" } - else - @config.logger.error { "[LDClient] Client has not finished initializing; feature store unavailable, returning default value" } - @event_processor.add_event(kind: "feature", key: key, value: default, default: default, user: user) - return default - end - end - - sanitize_user(user) if !user.nil? - feature = @store.get(FEATURES, key) - - if feature.nil? - @config.logger.info { "[LDClient] Unknown feature flag #{key}. 
Returning default value" } - @event_processor.add_event(kind: "feature", key: key, value: default, default: default, user: user) - return default - end - - unless user - @config.logger.error { "[LDClient] Must specify user" } - @event_processor.add_event(make_feature_event(feature, user, nil, default, default)) - return default - end + evaluate_internal(key, user, default, false).value + end - begin - res = evaluate(feature, user, @store, @config.logger) - if !res[:events].nil? - res[:events].each do |event| - @event_processor.add_event(event) - end - end - value = res[:value] - if value.nil? - @config.logger.debug { "[LDClient] Result value is null in toggle" } - value = default - end - @event_processor.add_event(make_feature_event(feature, user, res[:variation], value, default)) - return value - rescue => exn - Util.log_exception(@config.logger, "Error evaluating feature flag", exn) - @event_processor.add_event(make_feature_event(feature, user, nil, default, default)) - return default - end + def variation_detail(key, user, default) + evaluate_internal(key, user, default, true) end # @@ -213,6 +172,8 @@ def all_flags(user) # @param options={} [Hash] Optional parameters to control how the state is generated # @option options [Boolean] :client_side_only (false) True if only flags marked for use with the # client-side SDK should be included in the state. By default, all flags are included. + # @option options [Boolean] :with_reasons (false) True if evaluation reasons should be included + # in the state. By default, they are not included. # @return [FeatureFlagsState] a FeatureFlagsState object which can be serialized to JSON # def all_flags_state(user, options={}) @@ -234,16 +195,17 @@ def all_flags_state(user, options={}) state = FeatureFlagsState.new(true) client_only = options[:client_side_only] || false + with_reasons = options[:with_reasons] || false features.each do |k, f| if client_only && !f[:clientSide] next end begin result = evaluate(f, user, @store, @config.logger) - state.add_flag(f, result[:value], result[:variation]) + state.add_flag(f, result.detail.value, result.detail.variation, with_reasons ? result.detail.reason : nil) rescue => exn Util.log_exception(@config.logger, "Error evaluating flag \"#{k}\" in all_flags_state", exn) - state.add_flag(f, nil, nil) + state.add_flag(f, nil, nil, with_reasons ? { kind: 'ERROR', errorKind: 'EXCEPTION' } : nil) end end @@ -261,27 +223,83 @@ def close @store.stop end + :private + + # @return [EvaluationDetail] + def evaluate_internal(key, user, default, include_reasons_in_events) + if @config.offline? + return error_result('CLIENT_NOT_READY', default) + end + + if !initialized? + if @store.initialized? + @config.logger.warn { "[LDClient] Client has not finished initializing; using last known values from feature store" } + else + @config.logger.error { "[LDClient] Client has not finished initializing; feature store unavailable, returning default value" } + @event_processor.add_event(kind: "feature", key: key, value: default, default: default, user: user) + return error_result('CLIENT_NOT_READY', default) + end + end + + sanitize_user(user) if !user.nil? + feature = @store.get(FEATURES, key) + + if feature.nil? + @config.logger.info { "[LDClient] Unknown feature flag \"#{key}\". Returning default value" } + detail = error_result('FLAG_NOT_FOUND', default) + @event_processor.add_event(kind: "feature", key: key, value: default, default: default, user: user, + reason: include_reasons_in_events ? 
detail.reason : nil) + return detail + end + + unless user + @config.logger.error { "[LDClient] Must specify user" } + detail = error_result('USER_NOT_SPECIFIED', default) + @event_processor.add_event(make_feature_event(feature, user, detail, default, include_reasons_in_events)) + return detail + end + + begin + res = evaluate(feature, user, @store, @config.logger) + if !res.events.nil? + res.events.each do |event| + @event_processor.add_event(event) + end + end + detail = res.detail + if detail.variation.nil? + detail = EvaluationDetail.new(default, nil, detail.reason) + end + @event_processor.add_event(make_feature_event(feature, user, detail, default, include_reasons_in_events)) + return detail + rescue => exn + Util.log_exception(@config.logger, "Error evaluating feature flag \"#{key}\"", exn) + detail = error_result('EXCEPTION', default) + @event_processor.add_event(make_feature_event(feature, user, detail, default, include_reasons_in_events)) + return detail + end + end + def sanitize_user(user) if user[:key] user[:key] = user[:key].to_s end end - def make_feature_event(flag, user, variation, value, default) + def make_feature_event(flag, user, detail, default, with_reasons) { kind: "feature", key: flag[:key], user: user, - variation: variation, - value: value, + variation: detail.variation, + value: detail.value, default: default, version: flag[:version], trackEvents: flag[:trackEvents], - debugEventsUntilDate: flag[:debugEventsUntilDate] + debugEventsUntilDate: flag[:debugEventsUntilDate], + reason: with_reasons ? detail.reason : nil } end - - private :evaluate, :sanitize_user, :make_feature_event end # diff --git a/spec/evaluation_spec.rb b/spec/evaluation_spec.rb index a8d980ae..d5ee1097 100644 --- a/spec/evaluation_spec.rb +++ b/spec/evaluation_spec.rb @@ -2,6 +2,9 @@ describe LaunchDarkly::Evaluation do subject { LaunchDarkly::Evaluation } + + include LaunchDarkly::Evaluation + let(:features) { LaunchDarkly::InMemoryFeatureStore.new } let(:user) { @@ -14,7 +17,13 @@ let(:logger) { LaunchDarkly::Config.default_logger } - include LaunchDarkly::Evaluation + def boolean_flag_with_rules(rules) + { key: 'feature', on: true, rules: rules, fallthrough: { variation: 0 }, variations: [ false, true ] } + end + + def boolean_flag_with_clauses(clauses) + boolean_flag_with_rules([{ id: 'ruleid', clauses: clauses, variation: 1 }]) + end describe "evaluate" do it "returns off variation if flag is off" do @@ -26,7 +35,10 @@ variations: ['a', 'b', 'c'] } user = { key: 'x' } - expect(evaluate(flag, user, features, logger)).to eq({variation: 1, value: 'b', events: []}) + detail = LaunchDarkly::EvaluationDetail.new('b', 1, { kind: 'OFF' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) end it "returns nil if flag is off and off variation is unspecified" do @@ -37,7 +49,10 @@ variations: ['a', 'b', 'c'] } user = { key: 'x' } - expect(evaluate(flag, user, features, logger)).to eq({variation: nil, value: nil, events: []}) + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'OFF' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) end it "returns off variation if prerequisite is not found" do @@ -50,7 +65,11 @@ variations: ['a', 'b', 'c'] } user = { key: 'x' } - expect(evaluate(flag, user, features, logger)).to eq({variation: 1, value: 'b', events: []}) + detail = LaunchDarkly::EvaluationDetail.new('b', 1, + { kind: 'PREREQUISITES_FAILED', 
prerequisiteKeys: ['badfeature'] }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) end it "returns off variation and event if prerequisite of a prerequisite is not found" do @@ -73,11 +92,15 @@ } features.upsert(LaunchDarkly::FEATURES, flag1) user = { key: 'x' } + detail = LaunchDarkly::EvaluationDetail.new('b', 1, + { kind: 'PREREQUISITES_FAILED', prerequisiteKeys: ['feature1'] }) events_should_be = [{ kind: 'feature', key: 'feature1', variation: nil, value: nil, version: 2, prereqOf: 'feature0', trackEvents: nil, debugEventsUntilDate: nil }] - expect(evaluate(flag, user, features, logger)).to eq({variation: 1, value: 'b', events: events_should_be}) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq(events_should_be) end it "returns off variation and event if prerequisite is not met" do @@ -99,11 +122,15 @@ } features.upsert(LaunchDarkly::FEATURES, flag1) user = { key: 'x' } + detail = LaunchDarkly::EvaluationDetail.new('b', 1, + { kind: 'PREREQUISITES_FAILED', prerequisiteKeys: ['feature1'] }) events_should_be = [{ kind: 'feature', key: 'feature1', variation: 0, value: 'd', version: 2, prereqOf: 'feature0', trackEvents: nil, debugEventsUntilDate: nil }] - expect(evaluate(flag, user, features, logger)).to eq({variation: 1, value: 'b', events: events_should_be}) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq(events_should_be) end it "returns fallthrough variation and event if prerequisite is met and there are no rules" do @@ -125,11 +152,14 @@ } features.upsert(LaunchDarkly::FEATURES, flag1) user = { key: 'x' } + detail = LaunchDarkly::EvaluationDetail.new('a', 0, { kind: 'FALLTHROUGH' }) events_should_be = [{ kind: 'feature', key: 'feature1', variation: 1, value: 'e', version: 2, prereqOf: 'feature0', trackEvents: nil, debugEventsUntilDate: nil }] - expect(evaluate(flag, user, features, logger)).to eq({variation: 0, value: 'a', events: events_should_be}) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq(events_should_be) end it "matches user from targets" do @@ -144,57 +174,96 @@ variations: ['a', 'b', 'c'] } user = { key: 'userkey' } - expect(evaluate(flag, user, features, logger)).to eq({variation: 2, value: 'c', events: []}) + detail = LaunchDarkly::EvaluationDetail.new('c', 2, { kind: 'TARGET_MATCH' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) end it "matches user from rules" do - flag = { - key: 'feature0', - on: true, - rules: [ - { - clauses: [ - { - attribute: 'key', - op: 'in', - values: [ 'userkey' ] - } - ], - variation: 2 - } - ], - fallthrough: { variation: 0 }, - offVariation: 1, - variations: ['a', 'b', 'c'] - } + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 1 } + flag = boolean_flag_with_rules([rule]) user = { key: 'userkey' } - expect(evaluate(flag, user, features, logger)).to eq({variation: 2, value: 'c', events: []}) + detail = LaunchDarkly::EvaluationDetail.new(true, 1, + { kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'ruleid' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) + end + + it "returns an error if rule variation is too high" do + rule = { id: 'ruleid', clauses: [{ attribute: 
'key', op: 'in', values: ['userkey'] }], variation: 999 } + flag = boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) + end + + it "returns an error if rule variation is negative" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: -1 } + flag = boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) + end + + it "returns an error if rule has neither variation nor rollout" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }] } + flag = boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) + end + + it "returns an error if rule has a rollout with no variations" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], + rollout: { variations: [] } } + flag = boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) end end - describe "clause_match_user" do + describe "clause" do it "can match built-in attribute" do user = { key: 'x', name: 'Bob' } clause = { attribute: 'name', op: 'in', values: ['Bob'] } - expect(clause_match_user(clause, user, features)).to be true + flag = boolean_flag_with_clauses([clause]) + expect(evaluate(flag, user, features, logger).detail.value).to be true end it "can match custom attribute" do user = { key: 'x', name: 'Bob', custom: { legs: 4 } } clause = { attribute: 'legs', op: 'in', values: [4] } - expect(clause_match_user(clause, user, features)).to be true + flag = boolean_flag_with_clauses([clause]) + expect(evaluate(flag, user, features, logger).detail.value).to be true end it "returns false for missing attribute" do user = { key: 'x', name: 'Bob' } clause = { attribute: 'legs', op: 'in', values: [4] } - expect(clause_match_user(clause, user, features)).to be false + flag = boolean_flag_with_clauses([clause]) + expect(evaluate(flag, user, features, logger).detail.value).to be false end it "can be negated" do user = { key: 'x', name: 'Bob' } clause = { attribute: 'name', op: 'in', values: ['Bob'], negate: true } - expect(clause_match_user(clause, user, features)).to be false + flag = boolean_flag_with_clauses([clause]) + expect(evaluate(flag, user, features, logger).detail.value).to be false end it "retrieves segment from segment store for segmentMatch operator" do @@ -208,23 +277,24 @@ user = { key: 'userkey' } clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } - - expect(clause_match_user(clause, user, features)).to be true + flag = boolean_flag_with_clauses([clause]) + expect(evaluate(flag, user, features, logger).detail.value).to be true end it "falls through with no errors if referenced segment is not 
found" do user = { key: 'userkey' } clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } - - expect(clause_match_user(clause, user, features)).to be false + flag = boolean_flag_with_clauses([clause]) + expect(evaluate(flag, user, features, logger).detail.value).to be false end it "can be negated" do user = { key: 'x', name: 'Bob' } clause = { attribute: 'name', op: 'in', values: ['Bob'] } + flag = boolean_flag_with_clauses([clause]) expect { clause[:negate] = true - }.to change {clause_match_user(clause, user, features)}.from(true).to(false) + }.to change {evaluate(flag, user, features, logger).detail.value}.from(true).to(false) end end @@ -326,7 +396,8 @@ it "should return #{shouldBe} for #{value1} #{op} #{value2}" do user = { key: 'x', custom: { foo: value1 } } clause = { attribute: 'foo', op: op, values: [value2] } - expect(clause_match_user(clause, user, features)).to be shouldBe + flag = boolean_flag_with_clauses([clause]) + expect(evaluate(flag, user, features, logger).detail.value).to be shouldBe end end end @@ -385,17 +456,6 @@ end end - def make_flag(key) - { - key: key, - rules: [], - variations: [ false, true ], - on: true, - fallthrough: { variation: 0 }, - version: 1 - } - end - def make_segment(key) { key: key, @@ -424,35 +484,30 @@ def make_user_matching_clause(user, attr) end describe 'segment matching' do - it 'explicitly includes user' do - segment = make_segment('segkey') - segment[:included] = [ user[:key] ] + def test_segment_match(segment) features.upsert(LaunchDarkly::SEGMENTS, segment) clause = make_segment_match_clause(segment) + flag = boolean_flag_with_clauses([clause]) + evaluate(flag, user, features, logger).detail.value + end - result = clause_match_user(clause, user, features) - expect(result).to be true + it 'explicitly includes user' do + segment = make_segment('segkey') + segment[:included] = [ user[:key] ] + expect(test_segment_match(segment)).to be true end it 'explicitly excludes user' do segment = make_segment('segkey') segment[:excluded] = [ user[:key] ] - features.upsert(LaunchDarkly::SEGMENTS, segment) - clause = make_segment_match_clause(segment) - - result = clause_match_user(clause, user, features) - expect(result).to be false + expect(test_segment_match(segment)).to be false end it 'both includes and excludes user; include takes priority' do segment = make_segment('segkey') segment[:included] = [ user[:key] ] segment[:excluded] = [ user[:key] ] - features.upsert(LaunchDarkly::SEGMENTS, segment) - clause = make_segment_match_clause(segment) - - result = clause_match_user(clause, user, features) - expect(result).to be true + expect(test_segment_match(segment)).to be true end it 'matches user by rule when weight is absent' do @@ -462,11 +517,7 @@ def make_user_matching_clause(user, attr) } segment = make_segment('segkey') segment[:rules] = [ segRule ] - features.upsert(LaunchDarkly::SEGMENTS, segment) - clause = make_segment_match_clause(segment) - - result = clause_match_user(clause, user, features) - expect(result).to be true + expect(test_segment_match(segment)).to be true end it 'matches user by rule when weight is nil' do @@ -477,11 +528,7 @@ def make_user_matching_clause(user, attr) } segment = make_segment('segkey') segment[:rules] = [ segRule ] - features.upsert(LaunchDarkly::SEGMENTS, segment) - clause = make_segment_match_clause(segment) - - result = clause_match_user(clause, user, features) - expect(result).to be true + expect(test_segment_match(segment)).to be true end it 'matches user with full rollout' do @@ -492,11 
+539,7 @@ def make_user_matching_clause(user, attr) } segment = make_segment('segkey') segment[:rules] = [ segRule ] - features.upsert(LaunchDarkly::SEGMENTS, segment) - clause = make_segment_match_clause(segment) - - result = clause_match_user(clause, user, features) - expect(result).to be true + expect(test_segment_match(segment)).to be true end it "doesn't match user with zero rollout" do @@ -507,11 +550,7 @@ def make_user_matching_clause(user, attr) } segment = make_segment('segkey') segment[:rules] = [ segRule ] - features.upsert(LaunchDarkly::SEGMENTS, segment) - clause = make_segment_match_clause(segment) - - result = clause_match_user(clause, user, features) - expect(result).to be false + expect(test_segment_match(segment)).to be false end it "matches user with multiple clauses" do @@ -522,11 +561,7 @@ def make_user_matching_clause(user, attr) } segment = make_segment('segkey') segment[:rules] = [ segRule ] - features.upsert(LaunchDarkly::SEGMENTS, segment) - clause = make_segment_match_clause(segment) - - result = clause_match_user(clause, user, features) - expect(result).to be true + expect(test_segment_match(segment)).to be true end it "doesn't match user with multiple clauses if a clause doesn't match" do @@ -538,11 +573,7 @@ def make_user_matching_clause(user, attr) } segment = make_segment('segkey') segment[:rules] = [ segRule ] - features.upsert(LaunchDarkly::SEGMENTS, segment) - clause = make_segment_match_clause(segment) - - result = clause_match_user(clause, user, features) - expect(result).to be false + expect(test_segment_match(segment)).to be false end end end diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index ae76a678..efaa1438 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -34,11 +34,18 @@ def event_processor end describe '#variation' do - it "will return the default value if the client is offline" do + feature_with_value = { key: "key", on: false, offVariation: 0, variations: ["value"], version: 100, + trackEvents: true, debugEventsUntilDate: 1000 } + + it "returns the default value if the client is offline" do result = offline_client.variation("doesntmatter", user, "default") expect(result).to eq "default" end + it "returns the default value for an unknown feature" do + expect(client.variation("badkey", user, "default")).to eq "default" + end + it "queues a feature request event for an unknown feature" do expect(event_processor).to receive(:add_event).with(hash_including( kind: "feature", key: "badkey", user: user, value: "default", default: "default" @@ -46,56 +53,113 @@ def event_processor client.variation("badkey", user, "default") end + it "returns the value for an existing feature" do + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + expect(client.variation("key", user, "default")).to eq "value" + end + it "queues a feature request event for an existing feature" do config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) expect(event_processor).to receive(:add_event).with(hash_including( kind: "feature", - key: feature[:key], - version: feature[:version], + key: "key", + version: 100, user: user, variation: 0, - value: true, + value: "value", default: "default", trackEvents: true, - debugEventsUntilDate: nil + debugEventsUntilDate: 1000 )) - client.variation(feature[:key], user, "default") + 
client.variation("key", user, "default") end it "queues a feature event for an existing feature when user is nil" do config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) expect(event_processor).to receive(:add_event).with(hash_including( kind: "feature", - key: feature[:key], - version: feature[:version], + key: "key", + version: 100, user: nil, variation: nil, value: "default", default: "default", trackEvents: true, - debugEventsUntilDate: nil + debugEventsUntilDate: 1000 )) - client.variation(feature[:key], nil, "default") + client.variation("key", nil, "default") end it "queues a feature event for an existing feature when user key is nil" do config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) bad_user = { name: "Bob" } expect(event_processor).to receive(:add_event).with(hash_including( kind: "feature", - key: feature[:key], - version: feature[:version], + key: "key", + version: 100, user: bad_user, variation: nil, value: "default", default: "default", trackEvents: true, - debugEventsUntilDate: nil + debugEventsUntilDate: 1000 + )) + client.variation("key", bad_user, "default") + end + end + + describe '#variation_detail' do + feature_with_value = { key: "key", on: false, offVariation: 0, variations: ["value"], version: 100, + trackEvents: true, debugEventsUntilDate: 1000 } + + it "returns the default value if the client is offline" do + result = offline_client.variation_detail("doesntmatter", user, "default") + expected = LaunchDarkly::EvaluationDetail.new("default", nil, { kind: 'ERROR', errorKind: 'CLIENT_NOT_READY' }) + expect(result).to eq expected + end + + it "returns the default value for an unknown feature" do + result = client.variation_detail("badkey", user, "default") + expected = LaunchDarkly::EvaluationDetail.new("default", nil, { kind: 'ERROR', errorKind: 'FLAG_NOT_FOUND'}) + expect(result).to eq expected + end + + it "queues a feature request event for an unknown feature" do + expect(event_processor).to receive(:add_event).with(hash_including( + kind: "feature", key: "badkey", user: user, value: "default", default: "default", + reason: { kind: 'ERROR', errorKind: 'FLAG_NOT_FOUND' } + )) + client.variation_detail("badkey", user, "default") + end + + it "returns a value for an existing feature" do + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + result = client.variation_detail("key", user, "default") + expected = LaunchDarkly::EvaluationDetail.new("value", 0, { kind: 'OFF' }) + expect(result).to eq expected + end + + it "queues a feature request event for an existing feature" do + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + expect(event_processor).to receive(:add_event).with(hash_including( + kind: "feature", + key: "key", + version: 100, + user: user, + variation: 0, + value: "value", + default: "default", + trackEvents: true, + debugEventsUntilDate: 1000, + reason: { kind: "OFF" } )) - client.variation(feature[:key], bad_user, "default") + client.variation_detail("key", user, "default") end end From d2c2ab81abd6e19934a2e444993cef1e1285e069 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 22 Aug 2018 
20:03:36 -0700 Subject: [PATCH 012/182] misc cleanup --- lib/ldclient-rb/evaluation.rb | 21 +++++++-------------- 1 file changed, 7 insertions(+), 14 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index b803f4a2..7a316aca 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -156,21 +156,21 @@ def evaluate(flag, user, store, logger) return EvalResult.new(detail, events) end - return EvalResult.new(get_off_value(flag, { kind: 'OFF' }), events) + return EvalResult.new(get_off_value(flag, { kind: 'OFF' }, logger), events) end def eval_internal(flag, user, store, events, logger) prereq_failure_reason = check_prerequisites(flag, user, store, events, logger) if !prereq_failure_reason.nil? - return get_off_value(flag, prereq_failure_reason) + return get_off_value(flag, prereq_failure_reason, logger) end # Check user target matches (flag[:targets] || []).each do |target| (target[:values] || []).each do |value| if value == user[:key] - return get_variation(flag, target[:variation], { kind: 'TARGET_MATCH' }) + return get_variation(flag, target[:variation], { kind: 'TARGET_MATCH' }, logger) end end end @@ -240,13 +240,6 @@ def check_prerequisites(flag, user, store, events, logger) { kind: 'PREREQUISITES_FAILED', prerequisiteKeys: failed_prereqs } end - def get_variation(flag, index) - if index >= flag[:variations].length - raise EvaluationError, "Invalid variation index" - end - flag[:variations][index] - end - def rule_match_user(rule, user, store) return false if !rule[:clauses] @@ -386,7 +379,7 @@ def match_any(op, value, values) :private - def get_variation(flag, index, reason) + def get_variation(flag, index, reason, logger) if index < 0 || index >= flag[:variations].length logger.error("[LDClient] Data inconsistency in feature flag \"#{flag[:key]}\": invalid variation index") return error_result('MALFORMED_FLAG') @@ -394,11 +387,11 @@ def get_variation(flag, index, reason) EvaluationDetail.new(flag[:variations][index], index, reason) end - def get_off_value(flag, reason) + def get_off_value(flag, reason, logger) if flag[:offVariation].nil? # off variation unspecified - return default value return EvaluationDetail.new(nil, nil, reason) end - get_variation(flag, flag[:offVariation], reason) + get_variation(flag, flag[:offVariation], reason, logger) end def get_value_for_variation_or_rollout(flag, vr, user, reason, logger) @@ -407,7 +400,7 @@ def get_value_for_variation_or_rollout(flag, vr, user, reason, logger) logger.error("[LDClient] Data inconsistency in feature flag \"#{flag[:key]}\": variation/rollout object with no variation or rollout") return error_result('MALFORMED_FLAG') end - return get_variation(flag, index, reason) + return get_variation(flag, index, reason, logger) end end end From 64a00a1a9388e85cb26e5650da97fa2029198d64 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 22 Aug 2018 20:14:37 -0700 Subject: [PATCH 013/182] misc cleanup, more error checking --- lib/ldclient-rb/evaluation.rb | 6 +- spec/evaluation_spec.rb | 111 +++++++++++++++++++++++++++++++++- 2 files changed, 111 insertions(+), 6 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index 7a316aca..7dfbc3db 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -132,9 +132,6 @@ def self.comparator(converter) end } - class EvaluationError < StandardError - end - # Used internally to hold an evaluation result and the events that were generated from prerequisites. 
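  # For illustration (values follow the examples in the specs): `detail` is the
  # EvaluationDetail for the flag itself, and `events` holds the feature-request events
  # generated while evaluating its prerequisites - e.g.
  #   EvalResult.new(EvaluationDetail.new('b', 1, { kind: 'OFF' }), [])
  # for a flag that is off with off variation 1 and has no prerequisites.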
EvalResult = Struct.new(:detail, :events) @@ -268,9 +265,8 @@ def clause_match_user_no_segments(clause, user) return false if val.nil? op = OPERATORS[clause[:op].to_sym] - if op.nil? - raise EvaluationError, "Unsupported operator #{clause[:op]} in evaluation" + return false end if val.is_a? Enumerable diff --git a/spec/evaluation_spec.rb b/spec/evaluation_spec.rb index d5ee1097..9cb148ff 100644 --- a/spec/evaluation_spec.rb +++ b/spec/evaluation_spec.rb @@ -55,6 +55,38 @@ def boolean_flag_with_clauses(clauses) expect(result.events).to eq([]) end + it "returns an error if off variation is too high" do + flag = { + key: 'feature', + on: false, + offVariation: 999, + fallthrough: { variation: 0 }, + variations: ['a', 'b', 'c'] + } + user = { key: 'x' } + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) + end + + it "returns an error if off variation is negative" do + flag = { + key: 'feature', + on: false, + offVariation: -1, + fallthrough: { variation: 0 }, + variations: ['a', 'b', 'c'] + } + user = { key: 'x' } + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) + end + it "returns off variation if prerequisite is not found" do flag = { key: 'feature0', @@ -162,9 +194,69 @@ def boolean_flag_with_clauses(clauses) expect(result.events).to eq(events_should_be) end + it "returns an error if fallthrough variation is too high" do + flag = { + key: 'feature', + on: true, + fallthrough: { variation: 999 }, + offVariation: 1, + variations: ['a', 'b', 'c'] + } + user = { key: 'userkey' } + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) + end + + it "returns an error if fallthrough variation is negative" do + flag = { + key: 'feature', + on: true, + fallthrough: { variation: -1 }, + offVariation: 1, + variations: ['a', 'b', 'c'] + } + user = { key: 'userkey' } + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) + end + + it "returns an error if fallthrough has no variation or rollout" do + flag = { + key: 'feature', + on: true, + fallthrough: { }, + offVariation: 1, + variations: ['a', 'b', 'c'] + } + user = { key: 'userkey' } + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) + end + + it "returns an error if fallthrough has a rollout with no variations" do + flag = { + key: 'feature', + on: true, + fallthrough: { rollout: { variations: [] } }, + offVariation: 1, + variations: ['a', 'b', 'c'] + } + user = { key: 'userkey' } + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) + end + it "matches user from targets" do flag = { - key: 'feature0', + key: 'feature', on: 
true, targets: [ { values: [ 'whoever', 'userkey' ], variation: 2 } @@ -259,6 +351,23 @@ def boolean_flag_with_clauses(clauses) expect(evaluate(flag, user, features, logger).detail.value).to be false end + it "returns false for unknown operator" do + user = { key: 'x', name: 'Bob' } + clause = { attribute: 'name', op: 'unknown', values: [4] } + flag = boolean_flag_with_clauses([clause]) + expect(evaluate(flag, user, features, logger).detail.value).to be false + end + + it "does not stop evaluating rules after clause with unknown operator" do + user = { key: 'x', name: 'Bob' } + clause0 = { attribute: 'name', op: 'unknown', values: [4] } + rule0 = { clauses: [ clause0 ], variation: 1 } + clause1 = { attribute: 'name', op: 'in', values: ['Bob'] } + rule1 = { clauses: [ clause1 ], variation: 1 } + flag = boolean_flag_with_rules([rule0, rule1]) + expect(evaluate(flag, user, features, logger).detail.value).to be true + end + it "can be negated" do user = { key: 'x', name: 'Bob' } clause = { attribute: 'name', op: 'in', values: ['Bob'], negate: true } From 46b642b0c0498bfba69577a544226a33f9095cd6 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 22 Aug 2018 20:49:58 -0700 Subject: [PATCH 014/182] don't keep evaluating prerequisites if one fails --- lib/ldclient-rb/evaluation.rb | 10 ++-------- spec/evaluation_spec.rb | 6 +++--- 2 files changed, 5 insertions(+), 11 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index 7dfbc3db..51cf3c66 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -192,8 +192,6 @@ def eval_internal(flag, user, store, events, logger) end def check_prerequisites(flag, user, store, events, logger) - failed_prereqs = [] - (flag[:prerequisites] || []).each do |prerequisite| prereq_ok = true prereq_key = prerequisite[:key] @@ -227,14 +225,10 @@ def check_prerequisites(flag, user, store, events, logger) end end if !prereq_ok - failed_prereqs.push(prereq_key) + return { kind: 'PREREQUISITE_FAILED', prerequisiteKey: prereq_key } end end - - if failed_prereqs.empty? 
- return nil - end - { kind: 'PREREQUISITES_FAILED', prerequisiteKeys: failed_prereqs } + nil end def rule_match_user(rule, user, store) diff --git a/spec/evaluation_spec.rb b/spec/evaluation_spec.rb index 9cb148ff..7f0c82b4 100644 --- a/spec/evaluation_spec.rb +++ b/spec/evaluation_spec.rb @@ -98,7 +98,7 @@ def boolean_flag_with_clauses(clauses) } user = { key: 'x' } detail = LaunchDarkly::EvaluationDetail.new('b', 1, - { kind: 'PREREQUISITES_FAILED', prerequisiteKeys: ['badfeature'] }) + { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'badfeature' }) result = evaluate(flag, user, features, logger) expect(result.detail).to eq(detail) expect(result.events).to eq([]) @@ -125,7 +125,7 @@ def boolean_flag_with_clauses(clauses) features.upsert(LaunchDarkly::FEATURES, flag1) user = { key: 'x' } detail = LaunchDarkly::EvaluationDetail.new('b', 1, - { kind: 'PREREQUISITES_FAILED', prerequisiteKeys: ['feature1'] }) + { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) events_should_be = [{ kind: 'feature', key: 'feature1', variation: nil, value: nil, version: 2, prereqOf: 'feature0', trackEvents: nil, debugEventsUntilDate: nil @@ -155,7 +155,7 @@ def boolean_flag_with_clauses(clauses) features.upsert(LaunchDarkly::FEATURES, flag1) user = { key: 'x' } detail = LaunchDarkly::EvaluationDetail.new('b', 1, - { kind: 'PREREQUISITES_FAILED', prerequisiteKeys: ['feature1'] }) + { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) events_should_be = [{ kind: 'feature', key: 'feature1', variation: 0, value: 'd', version: 2, prereqOf: 'feature0', trackEvents: nil, debugEventsUntilDate: nil From 855c4e2be634b475957d46cda6870d1c52b326ed Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 22 Aug 2018 22:28:28 -0700 Subject: [PATCH 015/182] doc comment --- lib/ldclient-rb/ldclient.rb | 40 ++++++++++++++++++++++++++++++++++++- 1 file changed, 39 insertions(+), 1 deletion(-) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 8efd422a..1d5c23a1 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -115,7 +115,7 @@ def initialized? # @param key [String] the unique feature key for the feature flag, as shown # on the LaunchDarkly dashboard # @param user [Hash] a hash containing parameters for the end user requesting the flag - # @param default=false the default value of the flag + # @param default the default value of the flag # # @return the variation to show the user, or the # default value if there's an an error @@ -123,6 +123,44 @@ def variation(key, user, default) evaluate_internal(key, user, default, false).value end + # + # Determines the variation of a feature flag for a user, like `variation`, but also + # provides additional information about how this value was calculated. + # + # The return value of `variation_detail` is an `EvaluationDetail` object, which has + # three properties: + # + # `value`: the value that was calculated for this user (same as the return value + # of `variation`) + # + # `variation`: the positional index of this value in the flag, e.g. 0 for the first + # variation - or `nil` if it is the default value + # + # `reason`: a hash describing the main reason why this value was selected. 
Its `:kind` + # property will be one of the following: + # + # * `'OFF'`: the flag was off and therefore returned its configured off value + # * `'FALLTHROUGH'`: the flag was on but the user did not match any targets or rules + # * `'TARGET_MATCH'`: the user key was specifically targeted for this flag + # * `'RULE_MATCH'`: the user matched one of the flag's rules; the `:ruleIndex` and + # `:ruleId` properties indicate the positional index and unique identifier of the rule + # * `'PREREQUISITE_FAILED`': the flag was considered off because it had at least one + # prerequisite flag that either was off or did not return the desired variation; the + # `:prerequisiteKey` property indicates the key of the prerequisite that failed + # * `'ERROR'`: the flag could not be evaluated, e.g. because it does not exist or due + # to an unexpected error, and therefore returned the default value; the `:errorKind` + # property describes the nature of the error, such as `'FLAG_NOT_FOUND'` + # + # The `reason` will also be included in analytics events, if you are capturing + # detailed event data for this flag. + # + # @param key [String] the unique feature key for the feature flag, as shown + # on the LaunchDarkly dashboard + # @param user [Hash] a hash containing parameters for the end user requesting the flag + # @param default the default value of the flag + # + # @return an `EvaluationDetail` object describing the result + # def variation_detail(key, user, default) evaluate_internal(key, user, default, true) end From a0f002f3c1e1cdb8313b5f116d9ba909e4d0e17d Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 22 Aug 2018 22:34:25 -0700 Subject: [PATCH 016/182] rename variation to variation_index --- lib/ldclient-rb/evaluation.rb | 12 ++++++------ lib/ldclient-rb/ldclient.rb | 10 +++++----- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index 51cf3c66..bd4544dc 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -5,9 +5,9 @@ module LaunchDarkly # An object returned by `LDClient.variation_detail`, combining the result of a flag evaluation with # an explanation of how it was calculated. class EvaluationDetail - def initialize(value, variation, reason) + def initialize(value, variation_index, reason) @value = value - @variation = variation + @variation_index = variation_index @reason = reason end @@ -17,13 +17,13 @@ def initialize(value, variation, reason) # @return [int|nil] The index of the returned value within the flag's list of variations, e.g. # 0 for the first variation - or `nil` if the default value was returned. - attr_reader :variation + attr_reader :variation_index # @return [Hash] An object describing the main factor that influenced the flag evaluation value. 
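  # A minimal usage sketch - the flag key and user below are illustrative:
  #
  #   detail = client.variation_detail("my-flag", { key: "user-123" }, false)
  #   detail.value            # the evaluated value, or the default if evaluation failed
  #   detail.variation_index  # e.g. 0 for the first variation, or nil if the default was returned
  #   detail.reason           # e.g. { kind: 'FALLTHROUGH' },
  #                           # { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'other-flag' },
  #                           # or { kind: 'ERROR', errorKind: 'FLAG_NOT_FOUND' }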
attr_reader :reason def ==(other) - @value == other.value && @variation == other.variation && @reason == other.reason + @value == other.value && @variation_index == other.variation_index && @reason == other.reason end end @@ -208,7 +208,7 @@ def check_prerequisites(flag, user, store, events, logger) event = { kind: "feature", key: prereq_key, - variation: prereq_res.variation, + variation: prereq_res.variation_index, value: prereq_res.value, version: prereq_flag[:version], prereqOf: flag[:key], @@ -216,7 +216,7 @@ def check_prerequisites(flag, user, store, events, logger) debugEventsUntilDate: prereq_flag[:debugEventsUntilDate] } events.push(event) - if prereq_res.variation != prerequisite[:variation] + if prereq_res.variation_index != prerequisite[:variation] prereq_ok = false end rescue => exn diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 1d5c23a1..177b91a2 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -133,8 +133,8 @@ def variation(key, user, default) # `value`: the value that was calculated for this user (same as the return value # of `variation`) # - # `variation`: the positional index of this value in the flag, e.g. 0 for the first - # variation - or `nil` if it is the default value + # `variation_index`: the positional index of this value in the flag, e.g. 0 for the + # first variation - or `nil` if the default value was returned # # `reason`: a hash describing the main reason why this value was selected. Its `:kind` # property will be one of the following: @@ -240,7 +240,7 @@ def all_flags_state(user, options={}) end begin result = evaluate(f, user, @store, @config.logger) - state.add_flag(f, result.detail.value, result.detail.variation, with_reasons ? result.detail.reason : nil) + state.add_flag(f, result.detail.value, result.detail.variation_index, with_reasons ? result.detail.reason : nil) rescue => exn Util.log_exception(@config.logger, "Error evaluating flag \"#{k}\" in all_flags_state", exn) state.add_flag(f, nil, nil, with_reasons ? { kind: 'ERROR', errorKind: 'EXCEPTION' } : nil) @@ -305,7 +305,7 @@ def evaluate_internal(key, user, default, include_reasons_in_events) end end detail = res.detail - if detail.variation.nil? + if detail.variation_index.nil? detail = EvaluationDetail.new(default, nil, detail.reason) end @event_processor.add_event(make_feature_event(feature, user, detail, default, include_reasons_in_events)) @@ -329,7 +329,7 @@ def make_feature_event(flag, user, detail, default, with_reasons) kind: "feature", key: flag[:key], user: user, - variation: detail.variation, + variation: detail.variation_index, value: detail.value, default: default, version: flag[:version], From 4ec43db7e4b7d58ad04bf5f9dde015f0eed0a816 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 22 Aug 2018 22:44:24 -0700 Subject: [PATCH 017/182] comment --- lib/ldclient-rb/ldclient.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 177b91a2..1c2d2257 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -211,7 +211,7 @@ def all_flags(user) # @option options [Boolean] :client_side_only (false) True if only flags marked for use with the # client-side SDK should be included in the state. By default, all flags are included. # @option options [Boolean] :with_reasons (false) True if evaluation reasons should be included - # in the state. By default, they are not included. + # in the state (see `variation_detail`). 
By default, they are not included. # @return [FeatureFlagsState] a FeatureFlagsState object which can be serialized to JSON # def all_flags_state(user, options={}) From 9622e0116f5b4a513e705630a19603842d07cd75 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 23 Aug 2018 17:11:29 -0700 Subject: [PATCH 018/182] more test coverage, convenience method --- lib/ldclient-rb/evaluation.rb | 6 ++++++ lib/ldclient-rb/ldclient.rb | 2 +- spec/ldclient_spec.rb | 17 +++++++++++++++++ 3 files changed, 24 insertions(+), 1 deletion(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index bd4544dc..4f6cbb0e 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -22,6 +22,12 @@ def initialize(value, variation_index, reason) # @return [Hash] An object describing the main factor that influenced the flag evaluation value. attr_reader :reason + # @return [boolean] True if the flag evaluated to the default value rather than to one of its + # variations. + def default_value? + variation_index.nil? + end + def ==(other) @value == other.value && @variation_index == other.variation_index && @reason == other.reason end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 1c2d2257..a87344ed 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -305,7 +305,7 @@ def evaluate_internal(key, user, default, include_reasons_in_events) end end detail = res.detail - if detail.variation_index.nil? + if detail.default_value? detail = EvaluationDetail.new(default, nil, detail.reason) end @event_processor.add_event(make_feature_event(feature, user, detail, default, include_reasons_in_events)) diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index efaa1438..d76f7834 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -59,6 +59,13 @@ def event_processor expect(client.variation("key", user, "default")).to eq "value" end + it "returns the default value if a feature evaluates to nil" do + empty_feature = { key: "key", on: false, offVariation: nil } + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, empty_feature) + expect(client.variation("key", user, "default")).to eq "default" + end + it "queues a feature request event for an existing feature" do config.feature_store.init({ LaunchDarkly::FEATURES => {} }) config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) @@ -144,6 +151,16 @@ def event_processor expect(result).to eq expected end + it "returns the default value if a feature evaluates to nil" do + empty_feature = { key: "key", on: false, offVariation: nil } + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, empty_feature) + result = client.variation_detail("key", user, "default") + expected = LaunchDarkly::EvaluationDetail.new("default", nil, { kind: 'OFF' }) + expect(result).to eq expected + expect(result.default_value?).to be true + end + it "queues a feature request event for an existing feature" do config.feature_store.init({ LaunchDarkly::FEATURES => {} }) config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) From 084d9eacf32a6cc36ff1a150dc3bef9190ba2b64 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 29 Aug 2018 11:25:31 -0700 Subject: [PATCH 019/182] fix event generation for a prerequisite that is off --- lib/ldclient-rb/evaluation.rb | 26 ++++++++++++-------------- spec/evaluation_spec.rb | 32 ++++++++++++++++++++++++++++++++ 2 
files changed, 44 insertions(+), 14 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index 4f6cbb0e..aa4eb20d 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -153,17 +153,15 @@ def evaluate(flag, user, store, logger) end events = [] - - if flag[:on] - detail = eval_internal(flag, user, store, events, logger) - return EvalResult.new(detail, events) - end - - return EvalResult.new(get_off_value(flag, { kind: 'OFF' }, logger), events) + detail = eval_internal(flag, user, store, events, logger) + return EvalResult.new(detail, events) end - def eval_internal(flag, user, store, events, logger) + if !flag[:on] + return get_off_value(flag, { kind: 'OFF' }, logger) + end + prereq_failure_reason = check_prerequisites(flag, user, store, events, logger) if !prereq_failure_reason.nil? return get_off_value(flag, prereq_failure_reason, logger) @@ -203,14 +201,17 @@ def check_prerequisites(flag, user, store, events, logger) prereq_key = prerequisite[:key] prereq_flag = store.get(FEATURES, prereq_key) - if prereq_flag.nil? || !prereq_flag[:on] + if prereq_flag.nil? logger.error { "[LDClient] Could not retrieve prerequisite flag \"#{prereq_key}\" when evaluating \"#{flag[:key]}\"" } prereq_ok = false - elsif !prereq_flag[:on] - prereq_ok = false else begin prereq_res = eval_internal(prereq_flag, user, store, events, logger) + # Note that if the prerequisite flag is off, we don't consider it a match no matter what its + # off variation was. But we still need to evaluate it in order to generate an event. + if !prereq_flag[:on] || prereq_res.variation_index != prerequisite[:variation] + prereq_ok = false + end event = { kind: "feature", key: prereq_key, @@ -222,9 +223,6 @@ def check_prerequisites(flag, user, store, events, logger) debugEventsUntilDate: prereq_flag[:debugEventsUntilDate] } events.push(event) - if prereq_res.variation_index != prerequisite[:variation] - prereq_ok = false - end rescue => exn Util.log_exception(logger, "Error evaluating prerequisite flag \"#{prereq_key}\" for flag \"{flag[:key]}\"", exn) prereq_ok = false diff --git a/spec/evaluation_spec.rb b/spec/evaluation_spec.rb index 7f0c82b4..3af960c6 100644 --- a/spec/evaluation_spec.rb +++ b/spec/evaluation_spec.rb @@ -135,6 +135,38 @@ def boolean_flag_with_clauses(clauses) expect(result.events).to eq(events_should_be) end + it "returns off variation and event if prerequisite is off" do + flag = { + key: 'feature0', + on: true, + prerequisites: [{key: 'feature1', variation: 1}], + fallthrough: { variation: 0 }, + offVariation: 1, + variations: ['a', 'b', 'c'], + version: 1 + } + flag1 = { + key: 'feature1', + on: false, + # note that even though it returns the desired variation, it is still off and therefore not a match + offVariation: 1, + fallthrough: { variation: 0 }, + variations: ['d', 'e'], + version: 2 + } + features.upsert(LaunchDarkly::FEATURES, flag1) + user = { key: 'x' } + detail = LaunchDarkly::EvaluationDetail.new('b', 1, + { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) + events_should_be = [{ + kind: 'feature', key: 'feature1', variation: 1, value: 'e', version: 2, prereqOf: 'feature0', + trackEvents: nil, debugEventsUntilDate: nil + }] + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq(events_should_be) + end + it "returns off variation and event if prerequisite is not met" do flag = { key: 'feature0', From 02b5712c434c7a4e6524d6e3752c09be4437feca Mon Sep 17 00:00:00 
2001 From: Eli Bishop Date: Wed, 29 Aug 2018 11:27:26 -0700 Subject: [PATCH 020/182] fix private --- lib/ldclient-rb/evaluation.rb | 2 +- lib/ldclient-rb/ldclient.rb | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index 4f6cbb0e..a16d9adb 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -373,7 +373,7 @@ def match_any(op, value, values) return false end - :private + private def get_variation(flag, index, reason, logger) if index < 0 || index >= flag[:variations].length diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index a87344ed..7e86662b 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -261,7 +261,7 @@ def close @store.stop end - :private + private # @return [EvaluationDetail] def evaluate_internal(key, user, default, include_reasons_in_events) From 88676380bed1f147d04c8852f58ddb4f294e0eb5 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 4 Oct 2018 19:04:17 -0700 Subject: [PATCH 021/182] add option to reduce front-end metadata for untracked flags --- lib/ldclient-rb/flags_state.rb | 10 ++++++--- lib/ldclient-rb/ldclient.rb | 10 +++++++-- spec/flags_state_spec.rb | 3 +-- spec/ldclient_spec.rb | 41 ++++++++++++++++++++++++++++++++-- 4 files changed, 55 insertions(+), 9 deletions(-) diff --git a/lib/ldclient-rb/flags_state.rb b/lib/ldclient-rb/flags_state.rb index 05079920..a6036bde 100644 --- a/lib/ldclient-rb/flags_state.rb +++ b/lib/ldclient-rb/flags_state.rb @@ -15,13 +15,17 @@ def initialize(valid) end # Used internally to build the state map. - def add_flag(flag, value, variation, reason = nil) + def add_flag(flag, value, variation, reason = nil, details_only_if_tracked = false) key = flag[:key] @flag_values[key] = value - meta = { version: flag[:version], trackEvents: flag[:trackEvents] } + meta = {} + if !details_only_if_tracked || flag[:trackEvents] || flag[:debugEventsUntilDate] + meta[:version] = flag[:version] + meta[:reason] = reason if !reason.nil? + end meta[:variation] = variation if !variation.nil? + meta[:trackEvents] = true if flag[:trackEvents] meta[:debugEventsUntilDate] = flag[:debugEventsUntilDate] if flag[:debugEventsUntilDate] - meta[:reason] = reason if !reason.nil? @flag_metadata[key] = meta end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 7e86662b..4ad7928e 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -212,6 +212,10 @@ def all_flags(user) # client-side SDK should be included in the state. By default, all flags are included. # @option options [Boolean] :with_reasons (false) True if evaluation reasons should be included # in the state (see `variation_detail`). By default, they are not included. + # @option options [Boolean] :details_only_for_tracked_flags (false) True if any flag metadata that is + # normally only used for event generation - such as flag versions and evaluation reasons - should be + # omitted for any flag that does not have event tracking or debugging turned on. This reduces the size + # of the JSON data if you are passing the flag state to the front end. 
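  # A minimal bootstrapping sketch - assumes `client` is an initialized LDClient and `user`
  # is the user hash for the current request:
  #
  #   state = client.all_flags_state(user,
  #     { client_side_only: true, details_only_for_tracked_flags: true })
  #   state.valid?      # false if flag data could not be retrieved
  #   state.values_map  # { 'flag-key' => flag value, ... }
  #   state.as_json     # flag values plus '$flagsState' metadata, ready for JSON serialization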
# @return [FeatureFlagsState] a FeatureFlagsState object which can be serialized to JSON # def all_flags_state(user, options={}) @@ -234,16 +238,18 @@ def all_flags_state(user, options={}) state = FeatureFlagsState.new(true) client_only = options[:client_side_only] || false with_reasons = options[:with_reasons] || false + details_only_if_tracked = options[:details_only_for_tracked_flags] || false features.each do |k, f| if client_only && !f[:clientSide] next end begin result = evaluate(f, user, @store, @config.logger) - state.add_flag(f, result.detail.value, result.detail.variation_index, with_reasons ? result.detail.reason : nil) + state.add_flag(f, result.detail.value, result.detail.variation_index, with_reasons ? result.detail.reason : nil, + details_only_if_tracked) rescue => exn Util.log_exception(@config.logger, "Error evaluating flag \"#{k}\" in all_flags_state", exn) - state.add_flag(f, nil, nil, with_reasons ? { kind: 'ERROR', errorKind: 'EXCEPTION' } : nil) + state.add_flag(f, nil, nil, with_reasons ? { kind: 'ERROR', errorKind: 'EXCEPTION' } : nil, details_only_if_tracked) end end diff --git a/spec/flags_state_spec.rb b/spec/flags_state_spec.rb index 3d21029b..bda55b11 100644 --- a/spec/flags_state_spec.rb +++ b/spec/flags_state_spec.rb @@ -42,8 +42,7 @@ '$flagsState' => { 'key1' => { :variation => 0, - :version => 100, - :trackEvents => false + :version => 100 }, 'key2' => { :variation => 1, diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index d76f7834..6b923775 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -233,8 +233,7 @@ def event_processor '$flagsState' => { 'key1' => { :variation => 0, - :version => 100, - :trackEvents => false + :version => 100 }, 'key2' => { :variation => 1, @@ -263,6 +262,44 @@ def event_processor expect(values).to eq({ 'client-side-1' => 'value1', 'client-side-2' => 'value2' }) end + it "can omit details for untracked flags" do + flag1 = { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } + flag2 = { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true } + flag3 = { key: "key3", version: 300, offVariation: 1, variations: [ 'x', 'value3' ], debugEventsUntilDate: 1000 } + + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2, 'key3' => flag3 } }) + + state = client.all_flags_state({ key: 'userkey' }) + expect(state.valid?).to be true + + values = state.values_map + expect(values).to eq({ 'key1' => 'value1', 'key2' => 'value2', 'key3' => 'value3' }) + + result = state.as_json + expect(result).to eq({ + 'key1' => 'value1', + 'key2' => 'value2', + 'key3' => 'value3', + '$flagsState' => { + 'key1' => { + :variation => 0, + :version => 100 + }, + 'key2' => { + :variation => 1, + :version => 200, + :trackEvents => true + }, + 'key3' => { + :variation => 1, + :version => 300, + :debugEventsUntilDate => 1000 + } + }, + '$valid' => true + }) + end + it "returns empty state for nil user" do config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) From 9ea43e022a331d7c5ad577aad0b6d68d59ca22bd Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 8 Oct 2018 16:42:06 -0700 Subject: [PATCH 022/182] fix logic for whether a flag is tracked in all_flags_state --- lib/ldclient-rb/flags_state.rb | 6 +++++- spec/ldclient_spec.rb | 10 +++++----- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/lib/ldclient-rb/flags_state.rb b/lib/ldclient-rb/flags_state.rb index 
a6036bde..b761149c 100644 --- a/lib/ldclient-rb/flags_state.rb +++ b/lib/ldclient-rb/flags_state.rb @@ -19,7 +19,11 @@ def add_flag(flag, value, variation, reason = nil, details_only_if_tracked = fal key = flag[:key] @flag_values[key] = value meta = {} - if !details_only_if_tracked || flag[:trackEvents] || flag[:debugEventsUntilDate] + with_details = !details_only_if_tracked || flag[:trackEvents] + if !with_details && flag[:debugEventsUntilDate] + with_details = flag[:debugEventsUntilDate] > (Time.now.to_f * 1000).to_i + end + if with_details meta[:version] = flag[:version] meta[:reason] = reason if !reason.nil? end diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 6b923775..262f53f9 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -263,13 +263,14 @@ def event_processor end it "can omit details for untracked flags" do + future_time = (Time.now.to_f * 1000).to_i + 100000 flag1 = { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } flag2 = { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true } - flag3 = { key: "key3", version: 300, offVariation: 1, variations: [ 'x', 'value3' ], debugEventsUntilDate: 1000 } + flag3 = { key: "key3", version: 300, offVariation: 1, variations: [ 'x', 'value3' ], debugEventsUntilDate: future_time } config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2, 'key3' => flag3 } }) - state = client.all_flags_state({ key: 'userkey' }) + state = client.all_flags_state({ key: 'userkey' }, { details_only_for_tracked_flags: true }) expect(state.valid?).to be true values = state.values_map @@ -282,8 +283,7 @@ def event_processor 'key3' => 'value3', '$flagsState' => { 'key1' => { - :variation => 0, - :version => 100 + :variation => 0 }, 'key2' => { :variation => 1, @@ -293,7 +293,7 @@ def event_processor 'key3' => { :variation => 1, :version => 300, - :debugEventsUntilDate => 1000 + :debugEventsUntilDate => future_time } }, '$valid' => true From cce8e84964835b8d6d02ddff612a1af1e179e1c9 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 31 Oct 2018 10:23:48 -0700 Subject: [PATCH 023/182] implement file data source --- ldclient-rb.gemspec | 1 + lib/ldclient-rb.rb | 1 + lib/ldclient-rb/config.rb | 10 +- lib/ldclient-rb/file_data_source.rb | 209 ++++++++++++++++++++++++++++ lib/ldclient-rb/ldclient.rb | 33 +++-- spec/file_data_source_spec.rb | 167 ++++++++++++++++++++++ 6 files changed, 404 insertions(+), 17 deletions(-) create mode 100644 lib/ldclient-rb/file_data_source.rb create mode 100644 spec/file_data_source_spec.rb diff --git a/ldclient-rb.gemspec b/ldclient-rb.gemspec index a9bbfb23..9e7d5d04 100644 --- a/ldclient-rb.gemspec +++ b/ldclient-rb.gemspec @@ -40,4 +40,5 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "hashdiff", "~> 0.2" spec.add_runtime_dependency "http_tools", '~> 0.4.5' spec.add_runtime_dependency "socketry", "~> 0.5.1" + spec.add_runtime_dependency "listen", "~> 3.0" end diff --git a/lib/ldclient-rb.rb b/lib/ldclient-rb.rb index 7264b220..d3ee6ffc 100644 --- a/lib/ldclient-rb.rb +++ b/lib/ldclient-rb.rb @@ -18,3 +18,4 @@ require "ldclient-rb/events" require "ldclient-rb/redis_store" require "ldclient-rb/requestor" +require "ldclient-rb/file_data_source" diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index 3b62b2a3..dc89d30a 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -61,8 +61,11 @@ class Config # @option opts [Boolean] :inline_users_in_events 
(false) Whether to include full user details in every # analytics event. By default, events will only include the user key, except for one "index" event # that provides the full details for the user. - # @option opts [Object] :update_processor An object that will receive feature flag data from LaunchDarkly. - # Defaults to either the streaming or the polling processor, can be customized for tests. + # @option opts [Object] :update_processor (DEPRECATED) An object that will receive feature flag data from + # LaunchDarkly. Defaults to either the streaming or the polling processor, can be customized for tests. + # @option opts [Object] :update_processor_factory A function that takes the SDK and configuration object + # as parameters, and returns an object that can obtain feature flag data and put it into the feature + # store. Defaults to creating either the streaming or the polling processor, can be customized for tests. # @return [type] [description] # rubocop:disable Metrics/AbcSize, Metrics/PerceivedComplexity def initialize(opts = {}) @@ -88,6 +91,7 @@ def initialize(opts = {}) @user_keys_flush_interval = opts[:user_keys_flush_interval] || Config.default_user_keys_flush_interval @inline_users_in_events = opts[:inline_users_in_events] || false @update_processor = opts[:update_processor] + @update_processor_factory = opts[:update_processor_factory] end # @@ -218,6 +222,8 @@ def offline? attr_reader :update_processor + attr_reader :update_processor_factory + # # The default LaunchDarkly client configuration. This configuration sets # reasonable defaults for most users. diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb new file mode 100644 index 00000000..65ba0735 --- /dev/null +++ b/lib/ldclient-rb/file_data_source.rb @@ -0,0 +1,209 @@ +require 'concurrent/atomics' +require 'json' +require 'yaml' +require 'listen' +require 'pathname' + +module LaunchDarkly + + # + # Provides a way to use local files as a source of feature flag state. This would typically be + # used in a test environment, to operate using a predetermined feature flag state without an + # actual LaunchDarkly connection. + # + # To use this component, call `FileDataSource.factory`, and store its return value in the + # `update_processor_factory` property of your LaunchDarkly client configuration. In the options + # to `factory`, set `paths` to the file path(s) of your data file(s): + # + # config.update_processor_factory = FileDataSource.factory(paths: [ myFilePath ]) + # + # This will cause the client not to connect to LaunchDarkly to get feature flags. The + # client may still make network connections to send analytics events, unless you have disabled + # this with Config.send_events or Config.offline. + # + # Flag data files can be either JSON or YAML. They contain an object with three possible + # properties: + # + # - "flags": Feature flag definitions. + # - "flagValues": Simplified feature flags that contain only a value. + # - "segments": User segment definitions. + # + # The format of the data in "flags" and "segments" is defined by the LaunchDarkly application + # and is subject to change. Rather than trying to construct these objects yourself, it is simpler + # to request existing flags directly from the LaunchDarkly server in JSON format, and use this + # output as the starting point for your file. 
In Linux you would do this: + # + # curl -H "Authorization: {your sdk key}" https://app.launchdarkly.com/sdk/latest-all + # + # The output will look something like this (but with many more properties): + # + # { + # "flags": { + # "flag-key-1": { + # "key": "flag-key-1", + # "on": true, + # "variations": [ "a", "b" ] + # } + # }, + # "segments": { + # "segment-key-1": { + # "key": "segment-key-1", + # "includes": [ "user-key-1" ] + # } + # } + # } + # + # Data in this format allows the SDK to exactly duplicate all the kinds of flag behavior supported + # by LaunchDarkly. However, in many cases you will not need this complexity, but will just want to + # set specific flag keys to specific values. For that, you can use a much simpler format: + # + # { + # "flagValues": { + # "my-string-flag-key": "value-1", + # "my-boolean-flag-key": true, + # "my-integer-flag-key": 3 + # } + # } + # + # Or, in YAML: + # + # flagValues: + # my-string-flag-key: "value-1" + # my-boolean-flag-key: true + # my-integer-flag-key: 1 + # + # It is also possible to specify both "flags" and "flagValues", if you want some flags + # to have simple values and others to have complex behavior. However, it is an error to use the + # same flag key or segment key more than once, either in a single file or across multiple files. + # + # If the data source encounters any error in any file-- malformed content, a missing file, or a + # duplicate key-- it will not load flags from any of the files. + # + class FileDataSource + def self.factory(options={}) + return Proc.new do |sdk_key, config| + FileDataSourceImpl.new(config.feature_store, config.logger, options) + end + end + end + + class FileDataSourceImpl + def initialize(feature_store, logger, options={}) + @feature_store = feature_store + @logger = logger + @paths = options[:paths] || [] + @auto_update = options[:auto_update] + @initialized = Concurrent::AtomicBoolean.new(false) + @ready = Concurrent::Event.new + end + + def initialized? + @initialized.value + end + + def start + ready = Concurrent::Event.new + + # We will return immediately regardless of whether the file load succeeded or failed - + # the difference can be detected by checking "initialized?" + ready.set + + load_all + + if @auto_update + # If we're going to watch files, then the start event will be set the first time we get + # a successful load. + @listener = start_listener + end + + ready + end + + def stop + @listener.stop if !@listener.nil? + end + + private + + def load_all + all_data = { + FEATURES => {}, + SEGMENTS => {} + } + @paths.each do |path| + begin + load_file(path, all_data) + rescue => exn + Util.log_exception(@logger, "Unable to load flag data from \"#{path}\"", exn) + return + end + end + @feature_store.init(all_data) + @initialized.make_true + end + + def load_file(path, all_data) + parsed = parse_content(IO.read(path)) + (parsed[:flags] || {}).each do |key, flag| + add_item(all_data, FEATURES, flag) + end + (parsed[:flagValues] || {}).each do |key, value| + add_item(all_data, FEATURES, make_flag_with_value(key.to_s, value)) + end + (parsed[:segments] || {}).each do |key, segment| + add_item(all_data, SEGMENTS, segment) + end + end + + def parse_content(content) + if content.strip.start_with?("{") + JSON.parse(content, symbolize_names: true) + else + symbolize_all_keys(YAML.load(content)) + end + end + + def symbolize_all_keys(value) + # This is necessary because YAML.load doesn't have an option for parsing keys as symbols, and + # the SDK expects all objects to be formatted that way. 
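+      # For example, { "flags" => { "flag1" => { "on" => true } } } becomes
+      # { flags: { flag1: { on: true } } }.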
+ if value.is_a?(Hash) + value.map{ |k, v| [k.to_sym, symbolize_all_keys(v)] }.to_h + elsif value.is_a?(Array) + value.map{ |v| symbolize_all_keys(v) } + else + value + end + end + + def add_item(all_data, kind, item) + items = all_data[kind] || {} + if !items[item[:key]].nil? + raise ArgumentError, "#{kind[:namespace]} key \"#{item[:key]}\" was used more than once" + end + items[item[:key]] = item + end + + def make_flag_with_value(key, value) + { + key: key, + on: true, + fallthrough: { variation: 0 }, + variations: [ value ] + } + end + + def start_listener + resolved_paths = @paths.map { |p| Pathname.new(File.absolute_path(p)).realpath.to_s } + path_set = resolved_paths.to_set + dir_paths = resolved_paths.map{ |p| File.dirname(p) }.uniq + l = Listen.to(*dir_paths) do |modified, added, removed| + paths = modified + added + removed + if paths.any? { |p| path_set.include?(p) } + load_all + end + end + l.start + l + end + end +end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 4ad7928e..94c24229 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -39,22 +39,11 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) return # requestor and update processor are not used in this mode end - requestor = Requestor.new(sdk_key, config) - - if @config.offline? - @update_processor = NullUpdateProcessor.new + if @config.update_processor + @update_processor = @config.update_processor else - if @config.update_processor.nil? - if @config.stream? - @update_processor = StreamProcessor.new(sdk_key, config, requestor) - else - @config.logger.info { "Disabling streaming API" } - @config.logger.warn { "You should only disable the streaming API if instructed to do so by LaunchDarkly support" } - @update_processor = PollingProcessor.new(config, requestor) - end - else - @update_processor = @config.update_processor - end + factory = @config.update_processor || self.method(:create_default_update_processor) + @update_processor = factory.call(sdk_key, config) end ready = @update_processor.start @@ -269,6 +258,20 @@ def close private + def create_default_update_processor(sdk_key, config) + if config.offline? + return NullUpdateProcessor.new + end + requestor = Requestor.new(sdk_key, config) + if config.stream? + StreamProcessor.new(sdk_key, config, requestor) + else + config.logger.info { "Disabling streaming API" } + config.logger.warn { "You should only disable the streaming API if instructed to do so by LaunchDarkly support" } + PollingProcessor.new(config, requestor) + end + end + # @return [EvaluationDetail] def evaluate_internal(key, user, default, include_reasons_in_events) if @config.offline? 
diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb new file mode 100644 index 00000000..c163d385 --- /dev/null +++ b/spec/file_data_source_spec.rb @@ -0,0 +1,167 @@ +require "spec_helper" +require "tempfile" + +describe LaunchDarkly::FileDataSource do + let(:full_flag_1_key) { "flag1" } + let(:flag_value_1_key) { "flag2" } + let(:all_flag_keys) { [ full_flag_1_key, flag_value_1_key ] } + let(:full_segment_1_key) { "seg1" } + let(:all_segment_keys) { [ full_segment_1_key ] } + + let(:flag_only_json) { <<-EOF + { + "flags": { + "flag1": { + "key": "flag1", + "on": true + } + } + } +EOF +} + + let(:all_properties_json) { <<-EOF + { + "flags": { + "flag1": { + "key": "flag1", + "on": true + } + }, + "flagValues": { + "flag2": "value2" + }, + "segments": { + "seg1": { + "key": "seg1", + "include": ["user1"] + } + } + } +EOF + } + + let(:all_properties_yaml) { <<-EOF +--- +flags: + flag1: + key: flag1 + "on": true +flagValues: + flag2: value2 +segments: + seg1: + key: seg1 + include: ["user1"] +EOF + } + + let(:bad_file_path) { "no-such-file" } + + before do + @config = LaunchDarkly::Config.new + @store = @config.feature_store + end + + def make_temp_file(content) + file = Tempfile.new('flags') + IO.write(file, content) + file + end + + def with_data_source(options) + factory = LaunchDarkly::FileDataSource.factory(options) + ds = factory.call('', @config) + begin + yield ds + ensure + ds.stop + end + end + + it "doesn't load flags prior to start" do + file = make_temp_file('{"flagValues":{"key":"value"}}') + with_data_source({ paths: [ file.path ] }) do |ds| + expect(@store.initialized?).to eq(false) + expect(@store.all(LaunchDarkly::FEATURES)).to eq({}) + expect(@store.all(LaunchDarkly::SEGMENTS)).to eq({}) + end + end + + it "loads flags on start - from JSON" do + file = make_temp_file(all_properties_json) + with_data_source({ paths: [ file.path ] }) do |ds| + ds.start + expect(@store.initialized?).to eq(true) + expect(@store.all(LaunchDarkly::FEATURES).keys).to eq(all_flag_keys) + expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq(all_segment_keys) + end + end + + it "loads flags on start - from YAML" do + file = make_temp_file(all_properties_yaml) + with_data_source({ paths: [ file.path ] }) do |ds| + ds.start + expect(@store.initialized?).to eq(true) + expect(@store.all(LaunchDarkly::FEATURES).keys).to eq(all_flag_keys) + expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq(all_segment_keys) + end + end + + it "sets start event and initialized on successful load" do + file = make_temp_file(all_properties_json) + with_data_source({ paths: [ file.path ] }) do |ds| + event = ds.start + expect(event.set?).to eq(true) + expect(ds.initialized?).to eq(true) + end + end + + it "sets start event and does not set initialized on unsuccessful load" do + with_data_source({ paths: [ bad_file_path ] }) do |ds| + event = ds.start + expect(event.set?).to eq(true) + expect(ds.initialized?).to eq(false) + end + end + + it "does not reload modified file if auto-update is off" do + file = make_temp_file(flag_only_json) + + with_data_source({ paths: [ file.path ] }) do |ds| + event = ds.start + expect(event.set?).to eq(true) + expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([]) + + IO.write(file, all_properties_json) + sleep(0.5) + expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([]) + end + end + + it "reloads modified file if auto-update is on" do + file = make_temp_file(flag_only_json) + + with_data_source({ auto_update: true, paths: [ file.path ] }) do |ds| + event = 
ds.start + expect(event.set?).to eq(true) + expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([]) + + sleep(1) + IO.write(file, all_properties_json) + + max_time = 10 + ok = wait_for_condition(10) { @store.all(LaunchDarkly::SEGMENTS).keys == all_segment_keys } + expect(ok).to eq(true), "Waited #{max_time}s after modifying file and it did not reload" + end + end + + def wait_for_condition(max_time) + deadline = Time.now + max_time + while Time.now < deadline + return true if yield + sleep(0.1) + end + false + end +end From 22ebdeddf21c3d7cf9602add1442e934ead6b43d Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 31 Oct 2018 11:03:21 -0700 Subject: [PATCH 024/182] add poll interval param, tolerate single file path string, add doc comments --- lib/ldclient-rb/file_data_source.rb | 24 +++++++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 65ba0735..c61ddcf9 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -80,6 +80,20 @@ module LaunchDarkly # duplicate key-- it will not load flags from any of the files. # class FileDataSource + # + # Returns a factory for the file data source component. + # + # @param options [Hash] the configuration options + # @option options [Array] :paths The paths of the source files for loading flag data. These + # may be absolute paths or relative to the current working directory. + # @option options [Boolean] :auto_update True if the data source should watch for changes to + # the source file(s) and reload flags whenever there is a change. Note that auto-updating + # will only work if all of the files you specified have valid directory paths at startup time. + # @option options [Float] :poll_interval The minimum interval, in seconds, between checks for + # file modifications - used only if auto_update is true. On Linux and Mac platforms, you do + # not need to set this as there is a native OS mechanism for detecting file changes; on other + # platforms, the default interval is one second. + # def self.factory(options={}) return Proc.new do |sdk_key, config| FileDataSourceImpl.new(config.feature_store, config.logger, options) @@ -92,7 +106,11 @@ def initialize(feature_store, logger, options={}) @feature_store = feature_store @logger = logger @paths = options[:paths] || [] + if @paths.is_a? String + @paths = [ @paths ] + end @auto_update = options[:auto_update] + @poll_interval = options[:poll_interval] @initialized = Concurrent::AtomicBoolean.new(false) @ready = Concurrent::Event.new end @@ -196,7 +214,11 @@ def start_listener resolved_paths = @paths.map { |p| Pathname.new(File.absolute_path(p)).realpath.to_s } path_set = resolved_paths.to_set dir_paths = resolved_paths.map{ |p| File.dirname(p) }.uniq - l = Listen.to(*dir_paths) do |modified, added, removed| + opts = {} + if !@poll_interval.nil? + opts[:latency] = @poll_interval + end + l = Listen.to(*dir_paths, opts) do |modified, added, removed| paths = modified + added + removed if paths.any? 
{ |p| path_set.include?(p) } load_all From b864390a2079c6588e3fae0d8f8cfce359136cb6 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 31 Oct 2018 12:02:29 -0700 Subject: [PATCH 025/182] make listen dependency optional --- ldclient-rb.gemspec | 2 +- lib/ldclient-rb/file_data_source.rb | 23 ++++++++++++++++++++--- 2 files changed, 21 insertions(+), 4 deletions(-) diff --git a/ldclient-rb.gemspec b/ldclient-rb.gemspec index 9e7d5d04..0b8f4f9d 100644 --- a/ldclient-rb.gemspec +++ b/ldclient-rb.gemspec @@ -29,6 +29,7 @@ Gem::Specification.new do |spec| spec.add_development_dependency "rake", "~> 10.0" spec.add_development_dependency "rspec_junit_formatter", "~> 0.3.0" spec.add_development_dependency "timecop", "~> 0.9.1" + spec.add_development_dependency "listen", "~> 3.0" # see file_data_source.rb spec.add_runtime_dependency "json", [">= 1.8", "< 3"] spec.add_runtime_dependency "faraday", [">= 0.9", "< 2"] @@ -40,5 +41,4 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "hashdiff", "~> 0.2" spec.add_runtime_dependency "http_tools", '~> 0.4.5' spec.add_runtime_dependency "socketry", "~> 0.5.1" - spec.add_runtime_dependency "listen", "~> 3.0" end diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index c61ddcf9..833d6ec3 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -1,10 +1,21 @@ require 'concurrent/atomics' require 'json' require 'yaml' -require 'listen' require 'pathname' module LaunchDarkly + # To avoid pulling in 'listen' and its transitive dependencies for people who aren't using the + # file data source or who don't need auto-updating, we only enable auto-update if the 'listen' + # gem has been provided by the host app. + @@have_listen = false + begin + require 'listen' + @@have_listen = true + rescue + end + def self.can_watch_files? + @@have_listen + end # # Provides a way to use local files as a source of feature flag state. This would typically be @@ -87,8 +98,10 @@ class FileDataSource # @option options [Array] :paths The paths of the source files for loading flag data. These # may be absolute paths or relative to the current working directory. # @option options [Boolean] :auto_update True if the data source should watch for changes to - # the source file(s) and reload flags whenever there is a change. Note that auto-updating - # will only work if all of the files you specified have valid directory paths at startup time. + # the source file(s) and reload flags whenever there is a change. In order to use this + # feature, you must install the 'listen' gem - it is not included by default to avoid adding + # unwanted dependencies to the SDK. Note that auto-updating will only work if all of the files + # you specified have valid directory paths at startup time. # @option options [Float] :poll_interval The minimum interval, in seconds, between checks for # file modifications - used only if auto_update is true. On Linux and Mac platforms, you do # not need to set this as there is a native OS mechanism for detecting file changes; on other @@ -110,6 +123,10 @@ def initialize(feature_store, logger, options={}) @paths = [ @paths ] end @auto_update = options[:auto_update] + if @auto_update && !LaunchDarkly::can_watch_files? + @logger.error { "[LDClient] To use the auto_update option for FileDataSource, you must install the 'listen' gem." 
} + @auto_update = false + end @poll_interval = options[:poll_interval] @initialized = Concurrent::AtomicBoolean.new(false) @ready = Concurrent::Event.new From 789b5a4b54de8d84802af0579bacabbd07f92169 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 31 Oct 2018 12:04:07 -0700 Subject: [PATCH 026/182] readme --- README.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.md b/README.md index 1790b2d4..ead2bb6b 100644 --- a/README.md +++ b/README.md @@ -121,6 +121,10 @@ else end ``` +Using flag data from a file +--------------------------- +For testing purposes, the SDK can be made to read feature flag state from a file or files instead of connecting to LaunchDarkly. See [`file_data_source.rb`](https://github.com/launchdarkly/ruby-client/blob/master/lib/ldclient-rb/file_data_source.rb) for more details. + Learn more ----------- From 31a62c59a8f2209dbd758ca27fe113825b2a2943 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 31 Oct 2018 13:20:02 -0700 Subject: [PATCH 027/182] fix key handling and client integration, add tests --- lib/ldclient-rb/file_data_source.rb | 2 +- lib/ldclient-rb/ldclient.rb | 2 +- spec/file_data_source_spec.rb | 46 ++++++++++++++++++++++++++--- 3 files changed, 44 insertions(+), 6 deletions(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 833d6ec3..10588b5d 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -215,7 +215,7 @@ def add_item(all_data, kind, item) if !items[item[:key]].nil? raise ArgumentError, "#{kind[:namespace]} key \"#{item[:key]}\" was used more than once" end - items[item[:key]] = item + items[item[:key].to_sym] = item end def make_flag_with_value(key, value) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 94c24229..f8a75780 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -42,7 +42,7 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) if @config.update_processor @update_processor = @config.update_processor else - factory = @config.update_processor || self.method(:create_default_update_processor) + factory = @config.update_processor_factory || self.method(:create_default_update_processor) @update_processor = factory.call(sdk_key, config) end diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index c163d385..cf5d52ad 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -3,17 +3,23 @@ describe LaunchDarkly::FileDataSource do let(:full_flag_1_key) { "flag1" } + let(:full_flag_1_value) { "on" } let(:flag_value_1_key) { "flag2" } - let(:all_flag_keys) { [ full_flag_1_key, flag_value_1_key ] } + let(:flag_value_1) { "value2" } + let(:all_flag_keys) { [ full_flag_1_key.to_sym, flag_value_1_key.to_sym ] } let(:full_segment_1_key) { "seg1" } - let(:all_segment_keys) { [ full_segment_1_key ] } + let(:all_segment_keys) { [ full_segment_1_key.to_sym ] } let(:flag_only_json) { <<-EOF { "flags": { "flag1": { "key": "flag1", - "on": true + "on": true, + "fallthrough": { + "variation": 2 + }, + "variations": [ "fall", "off", "on" ] } } } @@ -25,7 +31,11 @@ "flags": { "flag1": { "key": "flag1", - "on": true + "on": true, + "fallthrough": { + "variation": 2 + }, + "variations": [ "fall", "off", "on" ] } }, "flagValues": { @@ -156,6 +166,34 @@ def with_data_source(options) end end + it "evaluates simplified flag with client as expected" do + file = make_temp_file(all_properties_json) + factory = LaunchDarkly::FileDataSource.factory({ 
paths: file.path }) + config = LaunchDarkly::Config.new(send_events: false, update_processor_factory: factory) + client = LaunchDarkly::LDClient.new('sdkKey', config) + + begin + value = client.variation(flag_value_1_key, { key: 'user' }, '') + expect(value).to eq(flag_value_1) + ensure + client.close + end + end + + it "evaluates full flag with client as expected" do + file = make_temp_file(all_properties_json) + factory = LaunchDarkly::FileDataSource.factory({ paths: file.path }) + config = LaunchDarkly::Config.new(send_events: false, update_processor_factory: factory) + client = LaunchDarkly::LDClient.new('sdkKey', config) + + begin + value = client.variation(full_flag_1_key, { key: 'user' }, '') + expect(value).to eq(full_flag_1_value) + ensure + client.close + end + end + def wait_for_condition(max_time) deadline = Time.now + max_time while Time.now < deadline From 778cb6dc5e4c2c367ccd2c1c7399a1338ec5196a Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 12:08:46 -0700 Subject: [PATCH 028/182] debugging --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index df9dac51..58c754ba 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -22,7 +22,7 @@ ruby-docker-template: &ruby-docker-template - run: gem install bundler - run: bundle install - run: mkdir ./rspec - - run: bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec + - run: LISTEN_GEM_DEBUGGING=2 bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec - store_test_results: path: ./rspec - store_artifacts: From 20dbef28105da9a1eca453ee86f2ff90267f4793 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 12:13:58 -0700 Subject: [PATCH 029/182] debugging --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 58c754ba..05bc4746 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -22,7 +22,7 @@ ruby-docker-template: &ruby-docker-template - run: gem install bundler - run: bundle install - run: mkdir ./rspec - - run: LISTEN_GEM_DEBUGGING=2 bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec + - run: bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec - store_test_results: path: ./rspec - store_artifacts: @@ -93,5 +93,5 @@ jobs: do rvm use $i; cp "Gemfile.lock.$i" Gemfile.lock; - bundle exec rspec spec; + LISTEN_GEM_DEBUGGING=2 bundle exec rspec spec; done From f1c00b1616a6767dd350c44497ba71d6b03e4bff Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 15:47:10 -0700 Subject: [PATCH 030/182] add fallback polling logic, fix tests --- lib/ldclient-rb/file_data_source.rb | 85 ++++++++++++++++++++++------- spec/file_data_source_spec.rb | 23 +++++++- 2 files changed, 86 insertions(+), 22 deletions(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 10588b5d..ae19bea8 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -13,7 +13,7 @@ module LaunchDarkly @@have_listen = true rescue end - def self.can_watch_files? + def self.have_listen? @@have_listen end @@ -23,10 +23,10 @@ def self.can_watch_files? # actual LaunchDarkly connection. 
# # To use this component, call `FileDataSource.factory`, and store its return value in the - # `update_processor_factory` property of your LaunchDarkly client configuration. In the options + # `update_processor_class` property of your LaunchDarkly client configuration. In the options # to `factory`, set `paths` to the file path(s) of your data file(s): # - # config.update_processor_factory = FileDataSource.factory(paths: [ myFilePath ]) + # config.update_processor_class = FileDataSource.factory(paths: [ myFilePath ]) # # This will cause the client not to connect to LaunchDarkly to get feature flags. The # client may still make network connections to send analytics events, unless you have disabled @@ -98,14 +98,15 @@ class FileDataSource # @option options [Array] :paths The paths of the source files for loading flag data. These # may be absolute paths or relative to the current working directory. # @option options [Boolean] :auto_update True if the data source should watch for changes to - # the source file(s) and reload flags whenever there is a change. In order to use this - # feature, you must install the 'listen' gem - it is not included by default to avoid adding - # unwanted dependencies to the SDK. Note that auto-updating will only work if all of the files - # you specified have valid directory paths at startup time. + # the source file(s) and reload flags whenever there is a change. Note that the default + # implementation of this feature is based on polling the filesystem, which may not perform + # well. If you install the 'listen' gem (not included by default, to avoid adding unwanted + # dependencies to the SDK), its native file watching mechanism will be used instead. Note + # that auto-updating will only work if all of the files you specified have valid directory + # paths at startup time. # @option options [Float] :poll_interval The minimum interval, in seconds, between checks for - # file modifications - used only if auto_update is true. On Linux and Mac platforms, you do - # not need to set this as there is a native OS mechanism for detecting file changes; on other - # platforms, the default interval is one second. + # file modifications - used only if auto_update is true, and if the native file-watching + # mechanism from 'listen' is not being used. # def self.factory(options={}) return Proc.new do |sdk_key, config| @@ -123,11 +124,8 @@ def initialize(feature_store, logger, options={}) @paths = [ @paths ] end @auto_update = options[:auto_update] - if @auto_update && !LaunchDarkly::can_watch_files? - @logger.error { "[LDClient] To use the auto_update option for FileDataSource, you must install the 'listen' gem." } - @auto_update = false - end - @poll_interval = options[:poll_interval] + @use_listen = @auto_update && LaunchDarkly.have_listen? && !options[:force_polling] # force_polling is used only for tests + @poll_interval = options[:poll_interval] || 1 @initialized = Concurrent::AtomicBoolean.new(false) @ready = Concurrent::Event.new end @@ -229,12 +227,17 @@ def make_flag_with_value(key, value) def start_listener resolved_paths = @paths.map { |p| Pathname.new(File.absolute_path(p)).realpath.to_s } + if @use_listen + start_listener_with_listen_gem(resolved_paths) + else + FileDataSourcePoller.new(resolved_paths, @poll_interval, self.method(:load_all)) + end + end + + def start_listener_with_listen_gem(resolved_paths) path_set = resolved_paths.to_set dir_paths = resolved_paths.map{ |p| File.dirname(p) }.uniq - opts = {} - if !@poll_interval.nil? 
- opts[:latency] = @poll_interval - end + opts = { latency: @poll_interval } l = Listen.to(*dir_paths, opts) do |modified, added, removed| paths = modified + added + removed if paths.any? { |p| path_set.include?(p) } @@ -244,5 +247,49 @@ def start_listener l.start l end + + # + # Used internally by FileDataSource to track data file changes if the 'listen' gem is not available. + # + class FileDataSourcePoller + def initialize(resolved_paths, interval, reloader) + @stopped = Concurrent::AtomicBoolean.new(false) + get_file_times = Proc.new do + ret = {} + resolved_paths.each do |path| + begin + ret[path] = File.mtime(path) + rescue + ret[path] = nil + end + end + ret + end + last_times = get_file_times.call + @thread = Thread.new do + while true + sleep interval + break if @stopped.value + new_times = get_file_times.call + changed = false + last_times.each do |path, old_time| + new_time = new_times[path] + if !new_time.nil? && new_time != old_time + changed = true + break + end + end + if changed + reloader.call + end + end + end + end + + def stop + @stopped.make_true + @thread.run # wakes it up if it's sleeping + end + end end end diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index cf5d52ad..5267a5f2 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -71,10 +71,18 @@ before do @config = LaunchDarkly::Config.new @store = @config.feature_store + @tmp_dir = Dir.mktmpdir + end + + after do + FileUtils.remove_dir(@tmp_dir) end def make_temp_file(content) - file = Tempfile.new('flags') + # Note that we don't create our files in the default temp file directory, but rather in an empty directory + # that we made. That's because (depending on the platform) the temp file directory may contain huge numbers + # of files, which can make the file watcher perform poorly enough to break the tests. 
+ file = Tempfile.new('flags', @tmp_dir) IO.write(file, content) file end @@ -149,10 +157,11 @@ def with_data_source(options) end end - it "reloads modified file if auto-update is on" do + def test_auto_reload(options) file = make_temp_file(flag_only_json) + options[:paths] = [ file.path ] - with_data_source({ auto_update: true, paths: [ file.path ] }) do |ds| + with_data_source(options) do |ds| event = ds.start expect(event.set?).to eq(true) expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([]) @@ -166,6 +175,14 @@ def with_data_source(options) end end + it "reloads modified file if auto-update is on" do + test_auto_reload({ auto_update: true }) + end + + it "reloads modified file in polling mode" do + test_auto_reload({ auto_update: true, force_polling: true, poll_interval: 0.1 }) + end + it "evaluates simplified flag with client as expected" do file = make_temp_file(all_properties_json) factory = LaunchDarkly::FileDataSource.factory({ paths: file.path }) From 198b843bba00fe92e9cfa9ef658c2649ce09be2f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 16:02:22 -0700 Subject: [PATCH 031/182] rm debugging --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 05bc4746..df9dac51 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -93,5 +93,5 @@ jobs: do rvm use $i; cp "Gemfile.lock.$i" Gemfile.lock; - LISTEN_GEM_DEBUGGING=2 bundle exec rspec spec; + bundle exec rspec spec; done From c5d1823372044bd067049fed90fb8e1f13428d94 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 17:25:16 -0700 Subject: [PATCH 032/182] debugging --- spec/file_data_source_spec.rb | 2 ++ 1 file changed, 2 insertions(+) diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index 5267a5f2..194ebc2c 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -68,6 +68,8 @@ let(:bad_file_path) { "no-such-file" } + Thread.report_on_exception = true + before do @config = LaunchDarkly::Config.new @store = @config.feature_store From 9baffe35cf84bbfdbf77f01989437620f4124bc7 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 17:38:15 -0700 Subject: [PATCH 033/182] debugging --- .circleci/config.yml | 2 +- spec/file_data_source_spec.rb | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index df9dac51..05bc4746 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -93,5 +93,5 @@ jobs: do rvm use $i; cp "Gemfile.lock.$i" Gemfile.lock; - bundle exec rspec spec; + LISTEN_GEM_DEBUGGING=2 bundle exec rspec spec; done diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index 194ebc2c..5267a5f2 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -68,8 +68,6 @@ let(:bad_file_path) { "no-such-file" } - Thread.report_on_exception = true - before do @config = LaunchDarkly::Config.new @store = @config.feature_store From 4d8121592756df99aefbef4c0aeb78032f544046 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 17:47:03 -0700 Subject: [PATCH 034/182] debugging --- lib/ldclient-rb/file_data_source.rb | 2 ++ spec/file_data_source_spec.rb | 1 + 2 files changed, 3 insertions(+) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index ae19bea8..de8ef34e 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -238,8 +238,10 @@ def 
start_listener_with_listen_gem(resolved_paths) path_set = resolved_paths.to_set dir_paths = resolved_paths.map{ |p| File.dirname(p) }.uniq opts = { latency: @poll_interval } + puts('*** starting listener') l = Listen.to(*dir_paths, opts) do |modified, added, removed| paths = modified + added + removed + puts('*** got listener notification: #{paths}') if paths.any? { |p| path_set.include?(p) } load_all end diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index 5267a5f2..f06c19f9 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -168,6 +168,7 @@ def test_auto_reload(options) sleep(1) IO.write(file, all_properties_json) + puts('*** modified the file') max_time = 10 ok = wait_for_condition(10) { @store.all(LaunchDarkly::SEGMENTS).keys == all_segment_keys } From 30d0cd270acf6518555e126bad28c689177ebb1d Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 17:48:09 -0700 Subject: [PATCH 035/182] debugging --- lib/ldclient-rb/file_data_source.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index de8ef34e..9a63e56b 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -241,7 +241,7 @@ def start_listener_with_listen_gem(resolved_paths) puts('*** starting listener') l = Listen.to(*dir_paths, opts) do |modified, added, removed| paths = modified + added + removed - puts('*** got listener notification: #{paths}') + puts("*** got listener notification: #{paths}") if paths.any? { |p| path_set.include?(p) } load_all end From 8cb2ed9adc1a7ac486f077eeb37d0100fa9d9bb5 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 17:51:27 -0700 Subject: [PATCH 036/182] comment correction --- lib/ldclient-rb/file_data_source.rb | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 9a63e56b..71f3a8be 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -23,10 +23,11 @@ def self.have_listen? # actual LaunchDarkly connection. # # To use this component, call `FileDataSource.factory`, and store its return value in the - # `update_processor_class` property of your LaunchDarkly client configuration. In the options + # `update_processor_factory` property of your LaunchDarkly client configuration. In the options # to `factory`, set `paths` to the file path(s) of your data file(s): # - # config.update_processor_class = FileDataSource.factory(paths: [ myFilePath ]) + # factory = FileDataSource.factory(paths: [ myFilePath ]) + # config = LaunchDarkly::Config.new(update_processor_factory: factory) # # This will cause the client not to connect to LaunchDarkly to get feature flags. The # client may still make network connections to send analytics events, unless you have disabled From a10f973ad98f033bd480e2ca9568041e826cd02b Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 11:15:29 -0700 Subject: [PATCH 037/182] documentation --- lib/ldclient-rb/file_data_source.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 71f3a8be..721eff75 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -107,7 +107,7 @@ class FileDataSource # paths at startup time. 
# @option options [Float] :poll_interval The minimum interval, in seconds, between checks for # file modifications - used only if auto_update is true, and if the native file-watching - # mechanism from 'listen' is not being used. + # mechanism from 'listen' is not being used. The default value is 1 second. # def self.factory(options={}) return Proc.new do |sdk_key, config| From 16cf9c086c06344d352b6e85bb6e02449af44cc1 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 11:15:54 -0700 Subject: [PATCH 038/182] always use YAML parser --- lib/ldclient-rb/file_data_source.rb | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 721eff75..a607923d 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -190,11 +190,10 @@ def load_file(path, all_data) end def parse_content(content) - if content.strip.start_with?("{") - JSON.parse(content, symbolize_names: true) - else - symbolize_all_keys(YAML.load(content)) - end + # We can use the Ruby YAML parser for both YAML and JSON (JSON is a subset of YAML and while + # not all YAML parsers handle it correctly, we have verified that the Ruby one does, at least + # for all the samples of actual flag data that we've tested). + symbolize_all_keys(YAML.load(content)) end def symbolize_all_keys(value) From 27d954e7f5f84ba4b87573ff80e9304a4eedab3b Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 11:21:29 -0700 Subject: [PATCH 039/182] report internal error that shouldn't happen --- lib/ldclient-rb/file_data_source.rb | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index a607923d..fae68123 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -209,7 +209,8 @@ def symbolize_all_keys(value) end def add_item(all_data, kind, item) - items = all_data[kind] || {} + items = all_data[kind] + raise ArgumentError, "Received unknown item kind #{kind} in add_data" if items.nil? # shouldn't be possible since we preinitialize the hash if !items[item[:key]].nil? 
raise ArgumentError, "#{kind[:namespace]} key \"#{item[:key]}\" was used more than once" end From fd308a9de3142b8fd493a995411d320a42664932 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 11:24:28 -0700 Subject: [PATCH 040/182] add test for multiple files --- spec/file_data_source_spec.rb | 24 +++++++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index f06c19f9..c0af4c67 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -24,7 +24,19 @@ } } EOF -} + } + + let(:segment_only_json) { <<-EOF + { + "segments": { + "seg1": { + "key": "seg1", + "include": ["user1"] + } + } + } +EOF + } let(:all_properties_json) { <<-EOF { @@ -143,6 +155,16 @@ def with_data_source(options) end end + it "can load multiple files" do + file1 = make_temp_file(flag_only_json) + file2 = make_temp_file(segment_only_json) + with_data_source({ paths: [ file1.path, file2.path ] }) do |ds| + ds.start + expect(@store.all(LaunchDarkly::FEATURES).keys).to eq([ full_flag_1_key.to_sym ]) + expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([ full_segment_1_key.to_sym ]) + end + end + it "does not reload modified file if auto-update is off" do file = make_temp_file(flag_only_json) From 1d016bfc9349000c8ddffce20b48634e1e20d6b3 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 11:26:10 -0700 Subject: [PATCH 041/182] fix duplicate key checking (string vs. symbol problem) --- lib/ldclient-rb/file_data_source.rb | 5 +++-- spec/file_data_source_spec.rb | 11 +++++++++++ 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index fae68123..aebd9709 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -211,10 +211,11 @@ def symbolize_all_keys(value) def add_item(all_data, kind, item) items = all_data[kind] raise ArgumentError, "Received unknown item kind #{kind} in add_data" if items.nil? # shouldn't be possible since we preinitialize the hash - if !items[item[:key]].nil? + key = item[:key].to_sym + if !items[key].nil? 
raise ArgumentError, "#{kind[:namespace]} key \"#{item[:key]}\" was used more than once" end - items[item[:key].to_sym] = item + items[key] = item end def make_flag_with_value(key, value) diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index c0af4c67..10e49e3c 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -160,11 +160,22 @@ def with_data_source(options) file2 = make_temp_file(segment_only_json) with_data_source({ paths: [ file1.path, file2.path ] }) do |ds| ds.start + expect(@store.initialized?).to eq(true) expect(@store.all(LaunchDarkly::FEATURES).keys).to eq([ full_flag_1_key.to_sym ]) expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([ full_segment_1_key.to_sym ]) end end + it "does not allow duplicate keys" do + file1 = make_temp_file(flag_only_json) + file2 = make_temp_file(flag_only_json) + with_data_source({ paths: [ file1.path, file2.path ] }) do |ds| + ds.start + expect(@store.initialized?).to eq(false) + expect(@store.all(LaunchDarkly::FEATURES).keys).to eq([]) + end + end + it "does not reload modified file if auto-update is off" do file = make_temp_file(flag_only_json) From c3e66d35c64909084d6d879fa485497fddf6c4a4 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 11:44:09 -0700 Subject: [PATCH 042/182] Don't use 'listen' in JRuby 9.1 --- lib/ldclient-rb/file_data_source.rb | 22 +++++++++++++++------- 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index aebd9709..23834be4 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -99,12 +99,12 @@ class FileDataSource # @option options [Array] :paths The paths of the source files for loading flag data. These # may be absolute paths or relative to the current working directory. # @option options [Boolean] :auto_update True if the data source should watch for changes to - # the source file(s) and reload flags whenever there is a change. Note that the default - # implementation of this feature is based on polling the filesystem, which may not perform - # well. If you install the 'listen' gem (not included by default, to avoid adding unwanted - # dependencies to the SDK), its native file watching mechanism will be used instead. Note - # that auto-updating will only work if all of the files you specified have valid directory - # paths at startup time. + # the source file(s) and reload flags whenever there is a change. Auto-updating will only + # work if all of the files you specified have valid directory paths at startup time. + # Note that the default implementation of this feature is based on polling the filesystem, + # which may not perform well. If you install the 'listen' gem (not included by default, to + # avoid adding unwanted dependencies to the SDK), its native file watching mechanism will be + # used instead. However, 'listen' will not be used in JRuby 9.1 due to a known instability. # @option options [Float] :poll_interval The minimum interval, in seconds, between checks for # file modifications - used only if auto_update is true, and if the native file-watching # mechanism from 'listen' is not being used. The default value is 1 second. @@ -125,7 +125,15 @@ def initialize(feature_store, logger, options={}) @paths = [ @paths ] end @auto_update = options[:auto_update] - @use_listen = @auto_update && LaunchDarkly.have_listen? 
&& !options[:force_polling] # force_polling is used only for tests + if @auto_update && LaunchDarkly.have_listen? && !options[:force_polling] # force_polling is used only for tests + # We have seen unreliable behavior in the 'listen' gem in JRuby 9.1 (https://github.com/guard/listen/issues/449). + # Therefore, on that platform we'll fall back to file polling instead. + if defined?(JRUBY_VERSION) && JRUBY_VERSION.start_with?("9.1.") + @use_listen = false + else + @use_listen = true + end + end @poll_interval = options[:poll_interval] || 1 @initialized = Concurrent::AtomicBoolean.new(false) @ready = Concurrent::Event.new From 1a36fd86ab5b867ad265e89f13d9c8e839278b39 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 11:50:22 -0700 Subject: [PATCH 043/182] rm debugging --- .circleci/config.yml | 2 +- lib/ldclient-rb/file_data_source.rb | 2 -- spec/file_data_source_spec.rb | 1 - 3 files changed, 1 insertion(+), 4 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 05bc4746..df9dac51 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -93,5 +93,5 @@ jobs: do rvm use $i; cp "Gemfile.lock.$i" Gemfile.lock; - LISTEN_GEM_DEBUGGING=2 bundle exec rspec spec; + bundle exec rspec spec; done diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 23834be4..1549f6ec 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -248,10 +248,8 @@ def start_listener_with_listen_gem(resolved_paths) path_set = resolved_paths.to_set dir_paths = resolved_paths.map{ |p| File.dirname(p) }.uniq opts = { latency: @poll_interval } - puts('*** starting listener') l = Listen.to(*dir_paths, opts) do |modified, added, removed| paths = modified + added + removed - puts("*** got listener notification: #{paths}") if paths.any? { |p| path_set.include?(p) } load_all end diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index 10e49e3c..60107e26 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -201,7 +201,6 @@ def test_auto_reload(options) sleep(1) IO.write(file, all_properties_json) - puts('*** modified the file') max_time = 10 ok = wait_for_condition(10) { @store.all(LaunchDarkly::SEGMENTS).keys == all_segment_keys } From 78ba8150b1a486b2a568ff7ac59f8b589fdfe98e Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 12:02:32 -0700 Subject: [PATCH 044/182] better error handling in poll thread --- lib/ldclient-rb/file_data_source.rb | 28 +++++++++++++++------------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 1549f6ec..c5207afb 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -240,7 +240,7 @@ def start_listener if @use_listen start_listener_with_listen_gem(resolved_paths) else - FileDataSourcePoller.new(resolved_paths, @poll_interval, self.method(:load_all)) + FileDataSourcePoller.new(resolved_paths, @poll_interval, self.method(:load_all), @logger) end end @@ -262,14 +262,14 @@ def start_listener_with_listen_gem(resolved_paths) # Used internally by FileDataSource to track data file changes if the 'listen' gem is not available. 
# class FileDataSourcePoller - def initialize(resolved_paths, interval, reloader) + def initialize(resolved_paths, interval, reloader, logger) @stopped = Concurrent::AtomicBoolean.new(false) get_file_times = Proc.new do ret = {} resolved_paths.each do |path| begin ret[path] = File.mtime(path) - rescue + rescue Errno::ENOENT ret[path] = nil end end @@ -280,17 +280,19 @@ def initialize(resolved_paths, interval, reloader) while true sleep interval break if @stopped.value - new_times = get_file_times.call - changed = false - last_times.each do |path, old_time| - new_time = new_times[path] - if !new_time.nil? && new_time != old_time - changed = true - break + begin + new_times = get_file_times.call + changed = false + last_times.each do |path, old_time| + new_time = new_times[path] + if !new_time.nil? && new_time != old_time + changed = true + break + end end - end - if changed - reloader.call + reloader.call if changed + rescue => exn + Util.log_exception(logger, "Unexpected exception in FileDataSourcePoller", exn) end end end From 38f534fd3b5968a7d6f75cf5f214be768f810f9f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 12:51:09 -0700 Subject: [PATCH 045/182] don't use Thread.raise to stop PollingProcessor thread; add test for PollingProcessor.stop --- lib/ldclient-rb/polling.rb | 3 +- spec/polling_spec.rb | 81 ++++++++++++++++++++++++++------------ 2 files changed, 58 insertions(+), 26 deletions(-) diff --git a/lib/ldclient-rb/polling.rb b/lib/ldclient-rb/polling.rb index 15965201..4ecd93f8 100644 --- a/lib/ldclient-rb/polling.rb +++ b/lib/ldclient-rb/polling.rb @@ -26,7 +26,8 @@ def start def stop if @stopped.make_true if @worker && @worker.alive? - @worker.raise "shutting down client" + @worker.run # causes the thread to wake up if it's currently in a sleep + @worker.join end @config.logger.info { "[LDClient] Polling connection stopped" } end diff --git a/spec/polling_spec.rb b/spec/polling_spec.rb index 8183b8c3..690147d0 100644 --- a/spec/polling_spec.rb +++ b/spec/polling_spec.rb @@ -3,10 +3,17 @@ describe LaunchDarkly::PollingProcessor do subject { LaunchDarkly::PollingProcessor } - let(:store) { LaunchDarkly::InMemoryFeatureStore.new } - let(:config) { LaunchDarkly::Config.new(feature_store: store) } let(:requestor) { double() } - let(:processor) { subject.new(config, requestor) } + + def with_processor(store) + config = LaunchDarkly::Config.new(feature_store: store) + processor = subject.new(config, requestor) + begin + yield processor + ensure + processor.stop + end + end describe 'successful request' do flag = { key: 'flagkey', version: 1 } @@ -22,47 +29,60 @@ it 'puts feature data in store' do allow(requestor).to receive(:request_all_data).and_return(all_data) - ready = processor.start - ready.wait - expect(store.get(LaunchDarkly::FEATURES, "flagkey")).to eq(flag) - expect(store.get(LaunchDarkly::SEGMENTS, "segkey")).to eq(segment) + store = LaunchDarkly::InMemoryFeatureStore.new + with_processor(store) do |processor| + ready = processor.start + ready.wait + expect(store.get(LaunchDarkly::FEATURES, "flagkey")).to eq(flag) + expect(store.get(LaunchDarkly::SEGMENTS, "segkey")).to eq(segment) + end end it 'sets initialized to true' do allow(requestor).to receive(:request_all_data).and_return(all_data) - ready = processor.start - ready.wait - expect(processor.initialized?).to be true - expect(store.initialized?).to be true + store = LaunchDarkly::InMemoryFeatureStore.new + with_processor(store) do |processor| + ready = processor.start + ready.wait + 
expect(processor.initialized?).to be true + expect(store.initialized?).to be true + end end end describe 'connection error' do it 'does not cause immediate failure, does not set initialized' do allow(requestor).to receive(:request_all_data).and_raise(StandardError.new("test error")) - ready = processor.start - finished = ready.wait(0.2) - expect(finished).to be false - expect(processor.initialized?).to be false - expect(store.initialized?).to be false + store = LaunchDarkly::InMemoryFeatureStore.new + with_processor(store) do |processor| + ready = processor.start + finished = ready.wait(0.2) + expect(finished).to be false + expect(processor.initialized?).to be false + expect(store.initialized?).to be false + end end end describe 'HTTP errors' do def verify_unrecoverable_http_error(status) allow(requestor).to receive(:request_all_data).and_raise(LaunchDarkly::UnexpectedResponseError.new(status)) - ready = processor.start - finished = ready.wait(0.2) - expect(finished).to be true - expect(processor.initialized?).to be false + with_processor(LaunchDarkly::InMemoryFeatureStore.new) do |processor| + ready = processor.start + finished = ready.wait(0.2) + expect(finished).to be true + expect(processor.initialized?).to be false + end end def verify_recoverable_http_error(status) allow(requestor).to receive(:request_all_data).and_raise(LaunchDarkly::UnexpectedResponseError.new(status)) - ready = processor.start - finished = ready.wait(0.2) - expect(finished).to be false - expect(processor.initialized?).to be false + with_processor(LaunchDarkly::InMemoryFeatureStore.new) do |processor| + ready = processor.start + finished = ready.wait(0.2) + expect(finished).to be false + expect(processor.initialized?).to be false + end end it 'stops immediately for error 401' do @@ -85,5 +105,16 @@ def verify_recoverable_http_error(status) verify_recoverable_http_error(503) end end -end + describe 'stop' do + it 'stops promptly rather than continuing to wait for poll interval' do + with_processor(LaunchDarkly::InMemoryFeatureStore.new) do |processor| + sleep(1) # somewhat arbitrary, but should ensure that it has started polling + start_time = Time.now + processor.stop + end_time = Time.now + expect(end_time - start_time).to be <(LaunchDarkly::Config.default_poll_interval - 5) + end + end + end +end From 5a875c8db7fff721c60040334c6da2df1133c9d7 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 10 Dec 2018 12:24:24 -0800 Subject: [PATCH 046/182] test on most recent patch version of each Ruby minor version --- .circleci/config.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index df9dac51..7fe98354 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -32,22 +32,22 @@ jobs: test-2.2: <<: *ruby-docker-template docker: - - image: circleci/ruby:2.2.9-jessie + - image: circleci/ruby:2.2.10-jessie - image: redis test-2.3: <<: *ruby-docker-template docker: - - image: circleci/ruby:2.3.6-jessie + - image: circleci/ruby:2.3.7-jessie - image: redis test-2.4: <<: *ruby-docker-template docker: - - image: circleci/ruby:2.4.4-stretch + - image: circleci/ruby:2.4.5-stretch - image: redis test-2.5: <<: *ruby-docker-template docker: - - image: circleci/ruby:2.5.1-stretch + - image: circleci/ruby:2.5.3-stretch - image: redis test-jruby-9.2: <<: *ruby-docker-template From cf7d9002e1adac2335d50ccb20b278dfedce4ad6 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 14 Dec 2018 17:47:40 -0800 Subject: [PATCH 047/182] factor common logic out of 
RedisFeatureStore, add integrations module --- lib/ldclient-rb.rb | 2 + lib/ldclient-rb/in_memory_store.rb | 2 + lib/ldclient-rb/integrations.rb | 257 +++++++++++++++++++++++ lib/ldclient-rb/interfaces.rb | 102 +++++++++ lib/ldclient-rb/redis_store.rb | 327 +++++++++++++---------------- spec/feature_store_spec_base.rb | 2 +- spec/integrations_helpers_spec.rb | 276 ++++++++++++++++++++++++ spec/redis_feature_store_spec.rb | 28 +-- 8 files changed, 803 insertions(+), 193 deletions(-) create mode 100644 lib/ldclient-rb/integrations.rb create mode 100644 lib/ldclient-rb/interfaces.rb create mode 100644 spec/integrations_helpers_spec.rb diff --git a/lib/ldclient-rb.rb b/lib/ldclient-rb.rb index d3ee6ffc..a1d7ffd9 100644 --- a/lib/ldclient-rb.rb +++ b/lib/ldclient-rb.rb @@ -1,4 +1,5 @@ require "ldclient-rb/version" +require "ldclient-rb/interfaces" require "ldclient-rb/util" require "ldclient-rb/evaluation" require "ldclient-rb/flags_state" @@ -17,5 +18,6 @@ require "ldclient-rb/event_summarizer" require "ldclient-rb/events" require "ldclient-rb/redis_store" +require "ldclient-rb/integrations" require "ldclient-rb/requestor" require "ldclient-rb/file_data_source" diff --git a/lib/ldclient-rb/in_memory_store.rb b/lib/ldclient-rb/in_memory_store.rb index e3e85879..4814c85d 100644 --- a/lib/ldclient-rb/in_memory_store.rb +++ b/lib/ldclient-rb/in_memory_store.rb @@ -20,6 +20,8 @@ module LaunchDarkly # streaming API. # class InMemoryFeatureStore + include LaunchDarkly::Interfaces::FeatureStore + def initialize @items = Hash.new @lock = Concurrent::ReadWriteLock.new diff --git a/lib/ldclient-rb/integrations.rb b/lib/ldclient-rb/integrations.rb new file mode 100644 index 00000000..21910b09 --- /dev/null +++ b/lib/ldclient-rb/integrations.rb @@ -0,0 +1,257 @@ +require "concurrent/atomics" + +module LaunchDarkly + module Integrations + module Redis + # + # Default value for the `redis_url` option for {new_feature_store}. This points to an instance of + # Redis running at `localhost` with its default port. + # + # @return [String] the default Redis URL + # + def default_redis_url + 'redis://localhost:6379/0' + end + + # + # Default value for the `prefix` option for {new_feature_store}. + # + # @return [String] the default key prefix + # + def default_prefix + 'launchdarkly' + end + + # + # Creates a Redis-backed persistent feature store. + # + # To use this method, you must first have the `redis` and `connection-pool` gems installed. Then, + # put the object returned by this method into the `feature_store` property of your + # client configuration ({LaunchDarkly::Config}). 
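+      #
+      # A hypothetical usage sketch (not part of this patch): it assumes the `redis` and
+      # `connection_pool` gems are installed, and that these helper methods are exposed at
+      # module level (as written here they are instance methods, so they would need `self.`
+      # or `module_function` to be callable exactly as shown).
+      #
+      #     store = LaunchDarkly::Integrations::Redis.new_feature_store(
+      #       redis_url: "redis://localhost:6379/0", prefix: "my-app")
+      #     config = LaunchDarkly::Config.new(feature_store: store)
+      #     client = LaunchDarkly::LDClient.new("your-sdk-key", config)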
+ # + # @param opts [Hash] the configuration options + # @option opts [String] :redis_url (default_redis_url) URL of the Redis instance (shortcut for omitting `redis_opts`) + # @option opts [Hash] :redis_opts options to pass to the Redis constructor (if you want to specify more than just `redis_url`) + # @option opts [String] :prefix (default_prefix) namespace prefix to add to all hash keys used by LaunchDarkly + # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` + # @option opts [Integer] :max_connections size of the Redis connection pool + # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no local caching + # @option opts [Integer] :capacity (1000) maximum number of items in the cache + # @option opts [Object] :pool custom connection pool, if desired + # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object + # + def new_feature_store(opts) + return RedisFeatureStore.new(opts) + end + end + + module Helpers + # + # CachingStoreWrapper is a partial implementation of the {LaunchDarkly::Interfaces::FeatureStore} + # pattern that delegates part of its behavior to another object, while providing optional caching + # behavior and other logic that would otherwise be repeated in every feature store implementation. + # This makes it easier to create new database integrations by implementing only the database-specific + # logic. + # + # The mixin {FeatureStoreCore} describes the methods that need to be supported by the inner + # implementation object. + # + class CachingStoreWrapper + include LaunchDarkly::Interfaces::FeatureStore + + INITED_CACHE_KEY = "$inited" + + private_constant :INITED_CACHE_KEY + + # + # Creates a new store wrapper instance. + # + # @param core [Object] an object that implements the {FeatureStoreCore} methods + # @param opts [Hash] a hash that may include cache-related options; all others will be ignored + # @option opts [Float] :expiration_seconds (15) cache TTL; zero means no caching + # @option opts [Integer] :capacity (1000) maximum number of items in the cache + # + def initialize(core, opts) + @core = core + + expiration_seconds = opts[:expiration] || 15 + capacity = opts[:capacity] || 1000 + if expiration_seconds > 0 + @cache = ExpiringCache.new(capacity, expiration_seconds) + else + @cache = nil + end + + @inited = Concurrent::AtomicBoolean.new(false) + end + + def init(all_data) + @core.init_internal(all_data) + @inited.make_true + + if !@cache.nil? + @cache.clear + all_data.each do |kind, items| + @cache[kind] = items_if_not_deleted(items) + items.each do |key, item| + @cache[item_cache_key(kind, key)] = [item] + end + end + end + end + + def get(kind, key) + if !@cache.nil? + cache_key = item_cache_key(kind, key) + cached = @cache[cache_key] # note, item entries in the cache are wrapped in an array so we can cache nil values + return item_if_not_deleted(cached[0]) if !cached.nil? + end + + item = @core.get_internal(kind, key) + + if !@cache.nil? + @cache[cache_key] = [item] + end + + item_if_not_deleted(item) + end + + def all(kind) + if !@cache.nil? + items = @cache[all_cache_key(kind)] + return items if !items.nil? + end + + items = items_if_not_deleted(@core.get_all_internal(kind)) + @cache[all_cache_key(kind)] = items if !@cache.nil? + items + end + + def upsert(kind, item) + new_state = @core.upsert_internal(kind, item) + + if !@cache.nil? 
+ @cache[item_cache_key(kind, item[:key])] = [new_state] + @cache.delete(all_cache_key(kind)) + end + end + + def delete(kind, key, version) + upsert(kind, { key: key, version: version, deleted: true }) + end + + def initialized? + return true if @inited.value + + if @cache.nil? + result = @core.initialized_internal? + else + result = @cache[INITED_CACHE_KEY] + if result.nil? + result = @core.initialized_internal? + @cache[INITED_CACHE_KEY] = result + end + end + + @inited.make_true if result + result + end + + def stop + @core.stop + end + + private + + def all_cache_key(kind) + kind + end + + def item_cache_key(kind, key) + kind[:namespace] + ":" + key.to_s + end + + def item_if_not_deleted(item) + (item.nil? || item[:deleted]) ? nil : item + end + + def items_if_not_deleted(items) + items.select { |key, item| !item[:deleted] } + end + end + + # + # This module describes the methods that you must implement on your own object in order to + # use {CachingStoreWrapper}. + # + module FeatureStoreCore + # + # Initializes the store. This is the same as {LaunchDarkly::Interfaces::FeatureStore#init}, + # but the wrapper will take care of updating the cache if caching is enabled. + # + # @param all_data [Hash] a hash where each key is one of the data kind objects, and each + # value is in turn a hash of string keys to entities + # + def init_internal(all_data) + end + + # + # Retrieves a single entity. This is the same as {LaunchDarkly::Interfaces::FeatureStore#get} + # except that 1. the wrapper will take care of filtering out deleted entities by checking the + # `:deleted` property, so you can just return exactly what was in the data store, and 2. the + # wrapper will take care of checking and updating the cache if caching is enabled. + # + # @param kind [Object] the kind of entity to get + # @param key [String] the unique key of the entity to get + # @return [Hash] the entity; nil if the key was not found + # + def get_internal(kind, key) + end + + # + # Retrieves all entities of the specified kind. This is the same as {LaunchDarkly::Interfaces::FeatureStore#all} + # except that 1. the wrapper will take care of filtering out deleted entities by checking the + # `:deleted` property, so you can just return exactly what was in the data store, and 2. the + # wrapper will take care of checking and updating the cache if caching is enabled. + # + # @param kind [Object] the kind of entity to get + # @return [Hash] a hash where each key is the entity's `:key` property and each value + # is the entity + # + def get_all_internal(kind) + end + + # + # Attempts to add or update an entity. This is the same as {LaunchDarkly::Interfaces::FeatureStore#upsert} + # except that 1. the wrapper will take care of updating the cache if caching is enabled, and 2. + # the method is expected to return the final state of the entity (i.e. either the `item` + # parameter if the update succeeded, or the previously existing entity in the store if the + # update failed; this is used for the caching logic). + # + # @param kind [Object] the kind of entity to add or update + # @param item [Hash] the entity to add or update + # @return [Hash] the entity as it now exists in the store after the update + # + def upsert_internal(kind, item) + end + + # + # Checks whether this store has been initialized. 
This is the same as + # {LaunchDarkly::Interfaces::FeatureStore#initialized?} except that there is less of a concern + # for efficiency, because the wrapper will use caching and memoization in order to call the method + # as little as possible. + # + # @return [Boolean] true if the store is in an initialized state + # + def initialized_internal? + end + + # + # Performs any necessary cleanup to shut down the store when the client is being shut down. + # + def stop + end + end + end + end +end diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb new file mode 100644 index 00000000..09e7797d --- /dev/null +++ b/lib/ldclient-rb/interfaces.rb @@ -0,0 +1,102 @@ + +module LaunchDarkly + module Interfaces + # + # Mixin that defines the required methods of a feature store implementation. The LaunchDarkly + # client uses the feature store to persist feature flags and related objects received from + # the LaunchDarkly service. Implementations must support concurrent access and updates. + # For more about how feature stores can be used, see: + # [Using a persistent feature store](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). + # + # An entity that can be stored in a feature store is a hash that can be converted to and from + # JSON, and that has at a minimum the following properties: `:key`, a string that is unique + # among entities of the same kind; `:version`, an integer that is higher for newer data; + # `:deleted`, a boolean (optional, defaults to false) that if true means this is a + # placeholder for a deleted entity. + # + # Examples of a "kind" are feature flags and segments; each of these is associated with an + # object such as {LaunchDarkly::FEATURES} and {LaunchDarkly::SEGMENTS}. The "kind" objects are + # hashes with a single property, `:namespace`, which is a short string unique to that kind. + # + # The default implementation is {LaunchDarkly::InMemoryFeatureStore}. Several implementations + # that use databases can be found in {LaunchDarkly::Integrations}. If you want to write a new + # implementation, see {LaunchDarkly::Integrations::Helpers} for tools that can make this task + # simpler. + # + module FeatureStore + # + # Initializes (or re-initializes) the store with the specified set of entities. Any + # existing entries will be removed. Implementations can assume that this data set is up to + # date-- there is no need to perform individual version comparisons between the existing + # objects and the supplied features. + # + # @param all_data [Hash] a hash where each key is one of the data kind objects, and each + # value is in turn a hash of string keys to entities + # + def init(all_data) + end + + # + # Returns the entity to which the specified key is mapped, if any. + # + # @param kind [Object] the kind of entity to get + # @param key [String] the unique key of the entity to get + # @return [Hash] the entity; nil if the key was not found, or if the stored entity's + # `:deleted` property was true + # + def get(kind, key) + end + + # + # Returns all stored entities of the specified kind, not including deleted entities. + # + # @param kind [Object] the kind of entity to get + # @return [Hash] a hash where each key is the entity's `:key` property and each value + # is the entity + # + def all(kind) + end + + # + # Attempt to add an entity, or update an existing entity with the same key. An update + # should only succeed if the new item's `:version` is greater than the old one; + # otherwise, the method should do nothing. 
+ # + # @param kind [Object] the kind of entity to add or update + # @param item [Hash] the entity to add or update + # + def upsert(kind, item) + end + + # + # Attempt to delete an entity if it exists. Deletion should only succeed if the + # `version` parameter is greater than the existing entity's `:version`; otherwise, the + # method should do nothing. + # + # @param kind [Object] the kind of entity to delete + # @param key [String] the unique key of the entity + # @param version [Integer] the entity must have a lower version than this to be deleted + # + def delete(kind, key, version) + end + + # + # Checks whether this store has been initialized. That means that `init` has been called + # either by this process, or (if the store can be shared) by another process. This + # method will be called frequently, so it should be efficient. You can assume that if it + # has returned true once, it can continue to return true, i.e. a store cannot become + # uninitialized again. + # + # @return [Boolean] true if the store is in an initialized state + # + def initialized? + end + + # + # Performs any necessary cleanup to shut down the store when the client is being shut down. + # + def stop + end + end + end +end diff --git a/lib/ldclient-rb/redis_store.rb b/lib/ldclient-rb/redis_store.rb index 3729ca6b..99912f5f 100644 --- a/lib/ldclient-rb/redis_store.rb +++ b/lib/ldclient-rb/redis_store.rb @@ -1,6 +1,5 @@ require "concurrent/atomics" require "json" -require "thread_safe" module LaunchDarkly # @@ -13,6 +12,9 @@ module LaunchDarkly # installed. Then, create an instance and store it in the `feature_store` property # of your client configuration. # + # @deprecated Use {LaunchDarkly::Integrations::Redis#new_feature_store} instead. This specific + # implementation class may change in the future. + # class RedisFeatureStore begin require "redis" @@ -22,6 +24,139 @@ class RedisFeatureStore REDIS_ENABLED = false end + include LaunchDarkly::Interfaces::FeatureStore + + # + # Internal implementation of the Redis feature store. We put a CachingStoreWrapper around this. 
+ # + class RedisFeatureStoreCore + def initialize(opts) + @redis_opts = opts[:redis_opts] || Hash.new + if opts[:redis_url] + @redis_opts[:url] = opts[:redis_url] + end + if !@redis_opts.include?(:url) + @redis_opts[:url] = LaunchDarkly::Integrations::Redis.default_redis_url + end + max_connections = opts[:max_connections] || 16 + @pool = opts[:pool] || ConnectionPool.new(size: max_connections) do + Redis.new(@redis_opts) + end + @prefix = opts[:prefix] || LaunchDarkly::Integrations::Redis.default_prefix + @logger = opts[:logger] || Config.default_logger + @test_hook = opts[:test_hook] # used for unit tests, deliberately undocumented + + @stopped = Concurrent::AtomicBoolean.new(false) + + with_connection do |redis| + @logger.info("RedisFeatureStore: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} \ + and prefix: #{@prefix}") + end + end + + def init_internal(all_data) + count = 0 + with_connection do |redis| + all_data.each do |kind, items| + redis.multi do |multi| + multi.del(items_key(kind)) + count = count + items.count + items.each { |key, item| + redis.hset(items_key(kind), key, item.to_json) + } + end + end + end + @logger.info { "RedisFeatureStore: initialized with #{count} items" } + end + + def get_internal(kind, key) + with_connection do |redis| + get_redis(redis, kind, key) + end + end + + def get_all_internal(kind) + fs = {} + with_connection do |redis| + hashfs = redis.hgetall(items_key(kind)) + hashfs.each do |k, json_item| + f = JSON.parse(json_item, symbolize_names: true) + fs[k.to_sym] = f + end + end + fs + end + + def upsert_internal(kind, new_item) + base_key = items_key(kind) + key = new_item[:key] + try_again = true + final_item = new_item + while try_again + try_again = false + with_connection do |redis| + redis.watch(base_key) do + old_item = get_redis(redis, kind, key) + before_update_transaction(base_key, key) + if old_item.nil? || old_item[:version] < new_item[:version] + result = redis.multi do |multi| + multi.hset(base_key, key, new_item.to_json) + end + if result.nil? + @logger.debug { "RedisFeatureStore: concurrent modification detected, retrying" } + try_again = true + end + else + final_item = old_item + action = new_item[:deleted] ? "delete" : "update" + @logger.warn { "RedisFeatureStore: attempted to #{action} #{key} version: #{old_item[:version]} \ +in '#{kind[:namespace]}' with a version that is the same or older: #{new_item[:version]}" } + end + redis.unwatch + end + end + end + final_item + end + + def initialized_internal? + with_connection { |redis| redis.exists(items_key(FEATURES)) } + end + + def stop + if @stopped.make_true + @pool.shutdown { |redis| redis.close } + end + end + + private + + # exposed for testing + def before_update_transaction(base_key, key) + @test_hook.before_update_transaction(base_key, key) if !@test_hook.nil? + end + + def items_key(kind) + @prefix + ":" + kind[:namespace] + end + + def cache_key(kind, key) + kind[:namespace] + ":" + key.to_s + end + + def with_connection + @pool.with { |redis| yield(redis) } + end + + def get_redis(redis, kind, key) + json_item = redis.hget(items_key(kind), key) + json_item.nil? ? nil : JSON.parse(json_item, symbolize_names: true) + end + end + + private_constant :RedisFeatureStoreCore + # # Constructor for a RedisFeatureStore instance. 
# @@ -31,45 +166,17 @@ class RedisFeatureStore # @option opts [String] :prefix namespace prefix to add to all hash keys used by LaunchDarkly # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` # @option opts [Integer] :max_connections size of the Redis connection pool - # @option opts [Integer] :expiration expiration time for the in-memory cache, in seconds; 0 for no local caching + # @option opts [Integer] :expiration_seconds expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity maximum number of feature flags (or related objects) to cache locally - # @option opts [Object] :pool custom connection pool, used for testing only + # @option opts [Object] :pool custom connection pool, if desired # def initialize(opts = {}) if !REDIS_ENABLED raise RuntimeError.new("can't use RedisFeatureStore because one of these gems is missing: redis, connection_pool") end - @redis_opts = opts[:redis_opts] || Hash.new - if opts[:redis_url] - @redis_opts[:url] = opts[:redis_url] - end - if !@redis_opts.include?(:url) - @redis_opts[:url] = RedisFeatureStore.default_redis_url - end - max_connections = opts[:max_connections] || 16 - @pool = opts[:pool] || ConnectionPool.new(size: max_connections) do - Redis.new(@redis_opts) - end - @prefix = opts[:prefix] || RedisFeatureStore.default_prefix - @logger = opts[:logger] || Config.default_logger - - expiration_seconds = opts[:expiration] || 15 - capacity = opts[:capacity] || 1000 - if expiration_seconds > 0 - @cache = ExpiringCache.new(capacity, expiration_seconds) - else - @cache = nil - end - @stopped = Concurrent::AtomicBoolean.new(false) - @inited = MemoizedValue.new { - query_inited - } - - with_connection do |redis| - @logger.info("RedisFeatureStore: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} \ -and prefix: #{@prefix}") - end + @core = RedisFeatureStoreCore.new(opts) + @wrapper = LaunchDarkly::Integrations::Helpers::CachingStoreWrapper.new(@core, opts) end # @@ -77,178 +184,42 @@ def initialize(opts = {}) # running at `localhost` with its default port. # def self.default_redis_url - 'redis://localhost:6379/0' + LaunchDarkly::Integrations::Redis.default_redis_url end # # Default value for the `prefix` constructor parameter. # def self.default_prefix - 'launchdarkly' + LaunchDarkly::Integrations::Redis.default_prefix end def get(kind, key) - f = @cache.nil? ? nil : @cache[cache_key(kind, key)] - if f.nil? - @logger.debug { "RedisFeatureStore: no cache hit for #{key} in '#{kind[:namespace]}', requesting from Redis" } - f = with_connection do |redis| - begin - get_redis(kind, redis, key.to_sym) - rescue => e - @logger.error { "RedisFeatureStore: could not retrieve #{key} from Redis in '#{kind[:namespace]}', with error: #{e}" } - nil - end - end - end - if f.nil? 
- @logger.debug { "RedisFeatureStore: #{key} not found in '#{kind[:namespace]}'" } - nil - elsif f[:deleted] - @logger.debug { "RedisFeatureStore: #{key} was deleted in '#{kind[:namespace]}', returning nil" } - nil - else - f - end + @wrapper.get(kind, key) end def all(kind) - fs = {} - with_connection do |redis| - begin - hashfs = redis.hgetall(items_key(kind)) - rescue => e - @logger.error { "RedisFeatureStore: could not retrieve all '#{kind[:namespace]}' items from Redis with error: #{e}; returning none" } - hashfs = {} - end - hashfs.each do |k, jsonItem| - f = JSON.parse(jsonItem, symbolize_names: true) - if !f[:deleted] - fs[k.to_sym] = f - end - end - end - fs + @wrapper.all(kind) end def delete(kind, key, version) - update_with_versioning(kind, { key: key, version: version, deleted: true }) + @wrapper.delete(kind, key, version) end def init(all_data) - @cache.clear if !@cache.nil? - count = 0 - with_connection do |redis| - all_data.each do |kind, items| - begin - redis.multi do |multi| - multi.del(items_key(kind)) - count = count + items.count - items.each { |key, item| - redis.hset(items_key(kind), key, item.to_json) - } - end - items.each { |key, item| - put_cache(kind, key.to_sym, item) - } - rescue => e - @logger.error { "RedisFeatureStore: could not initialize '#{kind[:namespace]}' in Redis, error: #{e}" } - end - end - end - @inited.set(true) - @logger.info { "RedisFeatureStore: initialized with #{count} items" } + @wrapper.init(all_data) end def upsert(kind, item) - update_with_versioning(kind, item) + @wrapper.upsert(kind, item) end def initialized? - @inited.get + @wrapper.initialized? end def stop - if @stopped.make_true - @pool.shutdown { |redis| redis.close } - @cache.clear if !@cache.nil? - end - end - - private - - # exposed for testing - def before_update_transaction(base_key, key) - end - - def items_key(kind) - @prefix + ":" + kind[:namespace] - end - - def cache_key(kind, key) - kind[:namespace] + ":" + key.to_s - end - - def with_connection - @pool.with { |redis| yield(redis) } - end - - def get_redis(kind, redis, key) - begin - json_item = redis.hget(items_key(kind), key) - if json_item - item = JSON.parse(json_item, symbolize_names: true) - put_cache(kind, key, item) - item - else - nil - end - rescue => e - @logger.error { "RedisFeatureStore: could not retrieve #{key} from Redis, error: #{e}" } - nil - end - end - - def put_cache(kind, key, value) - @cache[cache_key(kind, key)] = value if !@cache.nil? - end - - def update_with_versioning(kind, new_item) - base_key = items_key(kind) - key = new_item[:key] - try_again = true - while try_again - try_again = false - with_connection do |redis| - redis.watch(base_key) do - old_item = get_redis(kind, redis, key) - before_update_transaction(base_key, key) - if old_item.nil? || old_item[:version] < new_item[:version] - begin - result = redis.multi do |multi| - multi.hset(base_key, key, new_item.to_json) - end - if result.nil? - @logger.debug { "RedisFeatureStore: concurrent modification detected, retrying" } - try_again = true - else - put_cache(kind, key.to_sym, new_item) - end - rescue => e - @logger.error { "RedisFeatureStore: could not store #{key} in Redis, error: #{e}" } - end - else - action = new_item[:deleted] ? 
"delete" : "update" - @logger.warn { "RedisFeatureStore: attempted to #{action} #{key} version: #{old_item[:version]} \ - in '#{kind[:namespace]}' with a version that is the same or older: #{new_item[:version]}" } - end - redis.unwatch - end - end - end - end - - def query_inited - with_connection { |redis| redis.exists(items_key(FEATURES)) } + @wrapper.stop end end end diff --git a/spec/feature_store_spec_base.rb b/spec/feature_store_spec_base.rb index d6c1cedc..0e0f1ca9 100644 --- a/spec/feature_store_spec_base.rb +++ b/spec/feature_store_spec_base.rb @@ -1,6 +1,6 @@ require "spec_helper" -RSpec.shared_examples "feature_store" do |create_store_method| +shared_examples "feature_store" do |create_store_method| let(:feature0) { { diff --git a/spec/integrations_helpers_spec.rb b/spec/integrations_helpers_spec.rb new file mode 100644 index 00000000..24404a72 --- /dev/null +++ b/spec/integrations_helpers_spec.rb @@ -0,0 +1,276 @@ +require "spec_helper" + +describe LaunchDarkly::Integrations::Helpers::CachingStoreWrapper do + subject { LaunchDarkly::Integrations::Helpers::CachingStoreWrapper } + + THINGS = { namespace: "things" } + + shared_examples "tests" do |cached| + opts = cached ? { expiration: 30 } : { expiration: 0 } + + it "gets item" do + core = MockCore.new + wrapper = subject.new(core, opts) + key = "flag" + itemv1 = { key: key, version: 1 } + itemv2 = { key: key, version: 2 } + + core.force_set(THINGS, itemv1) + expect(wrapper.get(THINGS, key)).to eq itemv1 + + core.force_set(THINGS, itemv2) + expect(wrapper.get(THINGS, key)).to eq (cached ? itemv1 : itemv2) # if cached, we will not see the new underlying value yet + end + + it "gets deleted item" do + core = MockCore.new + wrapper = subject.new(core, opts) + key = "flag" + itemv1 = { key: key, version: 1, deleted: true } + itemv2 = { key: key, version: 2, deleted: false } + + core.force_set(THINGS, itemv1) + expect(wrapper.get(THINGS, key)).to eq nil # item is filtered out because deleted is true + + core.force_set(THINGS, itemv2) + expect(wrapper.get(THINGS, key)).to eq (cached ? nil : itemv2) # if cached, we will not see the new underlying value yet + end + + it "gets missing item" do + core = MockCore.new + wrapper = subject.new(core, opts) + key = "flag" + item = { key: key, version: 1 } + + expect(wrapper.get(THINGS, key)).to eq nil + + core.force_set(THINGS, item) + expect(wrapper.get(THINGS, key)).to eq (cached ? nil : item) # the cache can retain a nil result + end + + it "gets all items" do + core = MockCore.new + wrapper = subject.new(core, opts) + item1 = { key: "flag1", version: 1 } + item2 = { key: "flag2", version: 1 } + + core.force_set(THINGS, item1) + core.force_set(THINGS, item2) + expect(wrapper.all(THINGS)).to eq({ item1[:key] => item1, item2[:key] => item2 }) + + core.force_remove(THINGS, item2[:key]) + expect(wrapper.all(THINGS)).to eq (cached ? 
+ { item1[:key] => item1, item2[:key] => item2 } : + { item1[:key] => item1 }) + end + + it "gets all items filtering out deleted items" do + core = MockCore.new + wrapper = subject.new(core, opts) + item1 = { key: "flag1", version: 1 } + item2 = { key: "flag2", version: 1, deleted: true } + + core.force_set(THINGS, item1) + core.force_set(THINGS, item2) + expect(wrapper.all(THINGS)).to eq({ item1[:key] => item1 }) + end + + it "upserts item successfully" do + core = MockCore.new + wrapper = subject.new(core, opts) + key = "flag" + itemv1 = { key: key, version: 1 } + itemv2 = { key: key, version: 2 } + + wrapper.upsert(THINGS, itemv1) + expect(core.data[THINGS][key]).to eq itemv1 + + wrapper.upsert(THINGS, itemv2) + expect(core.data[THINGS][key]).to eq itemv2 + + # if we have a cache, verify that the new item is now cached by writing a different value + # to the underlying data - Get should still return the cached item + if cached + itemv3 = { key: key, version: 3 } + core.force_set(THINGS, itemv3) + end + + expect(wrapper.get(THINGS, key)).to eq itemv2 + end + + it "deletes item" do + core = MockCore.new + wrapper = subject.new(core, opts) + key = "flag" + itemv1 = { key: key, version: 1 } + itemv2 = { key: key, version: 2, deleted: true } + itemv3 = { key: key, version: 3 } + + core.force_set(THINGS, itemv1) + expect(wrapper.get(THINGS, key)).to eq itemv1 + + wrapper.delete(THINGS, key, 2) + expect(core.data[THINGS][key]).to eq itemv2 + + core.force_set(THINGS, itemv3) # make a change that bypasses the cache + + expect(wrapper.get(THINGS, key)).to eq (cached ? nil : itemv3) + end + end + + context "cached" do + include_examples "tests", true + + cached_opts = { expiration: 30 } + + it "get uses values from init" do + core = MockCore.new + wrapper = subject.new(core, cached_opts) + item1 = { key: "flag1", version: 1 } + item2 = { key: "flag2", version: 1 } + + wrapper.init({ THINGS => { item1[:key] => item1, item2[:key] => item2 } }) + core.force_remove(THINGS, item1[:key]) + + expect(wrapper.get(THINGS, item1[:key])).to eq item1 + end + + it "get all uses values from init" do + core = MockCore.new + wrapper = subject.new(core, cached_opts) + item1 = { key: "flag1", version: 1 } + item2 = { key: "flag2", version: 1 } + + wrapper.init({ THINGS => { item1[:key] => item1, item2[:key] => item2 } }) + core.force_remove(THINGS, item1[:key]) + + expect(wrapper.all(THINGS)).to eq ({ item1[:key] => item1, item2[:key] => item2 }) + end + + it "upsert doesn't update cache if unsuccessful" do + # This is for an upsert where the data in the store has a higher version. In an uncached + # store, this is just a no-op as far as the wrapper is concerned so there's nothing to + # test here. In a cached store, we need to verify that the cache has been refreshed + # using the data that was found in the store. + core = MockCore.new + wrapper = subject.new(core, cached_opts) + key = "flag" + itemv1 = { key: key, version: 1 } + itemv2 = { key: key, version: 2 } + + wrapper.upsert(THINGS, itemv2) + expect(core.data[THINGS][key]).to eq itemv2 + + wrapper.upsert(THINGS, itemv1) + expect(core.data[THINGS][key]).to eq itemv2 # value in store remains the same + + itemv3 = { key: key, version: 3 } + core.force_set(THINGS, itemv3) # bypasses cache so we can verify that itemv2 is in the cache + expect(wrapper.get(THINGS, key)).to eq itemv2 + end + + it "initialized? 
can cache false result" do + core = MockCore.new + wrapper = subject.new(core, { expiration: 0.2 }) # use a shorter cache TTL for this test + + expect(wrapper.initialized?).to eq false + expect(core.inited_query_count).to eq 1 + + core.inited = true + expect(wrapper.initialized?).to eq false + expect(core.inited_query_count).to eq 1 + + sleep(0.5) + + expect(wrapper.initialized?).to eq true + expect(core.inited_query_count).to eq 2 + + # From this point on it should remain true and the method should not be called + expect(wrapper.initialized?).to eq true + expect(core.inited_query_count).to eq 2 + end + end + + context "uncached" do + include_examples "tests", false + + uncached_opts = { expiration: 0 } + + it "queries internal initialized state only if not already inited" do + core = MockCore.new + wrapper = subject.new(core, uncached_opts) + + expect(wrapper.initialized?).to eq false + expect(core.inited_query_count).to eq 1 + + core.inited = true + expect(wrapper.initialized?).to eq true + expect(core.inited_query_count).to eq 2 + + core.inited = false + expect(wrapper.initialized?).to eq true + expect(core.inited_query_count).to eq 2 + end + + it "does not query internal initialized state if init has been called" do + core = MockCore.new + wrapper = subject.new(core, uncached_opts) + + expect(wrapper.initialized?).to eq false + expect(core.inited_query_count).to eq 1 + + wrapper.init({}) + + expect(wrapper.initialized?).to eq true + expect(core.inited_query_count).to eq 1 + end + end + + class MockCore + def initialize + @data = {} + @inited = false + @inited_query_count = 0 + end + + attr_reader :data + attr_reader :inited_query_count + attr_accessor :inited + + def force_set(kind, item) + @data[kind] = {} if !@data.has_key?(kind) + @data[kind][item[:key]] = item + end + + def force_remove(kind, key) + @data[kind].delete(key) if @data.has_key?(kind) + end + + def init_internal(all_data) + @data = all_data + @inited = true + end + + def get_internal(kind, key) + items = @data[kind] + items.nil? ? nil : items[key] + end + + def get_all_internal(kind) + @data[kind] + end + + def upsert_internal(kind, item) + @data[kind] = {} if !@data.has_key?(kind) + old_item = @data[kind][item[:key]] + return old_item if !old_item.nil? && old_item[:version] >= item[:version] + @data[kind][item[:key]] = item + item + end + + def initialized_internal? 
+ @inited_query_count = @inited_query_count + 1 + @inited + end + end +end diff --git a/spec/redis_feature_store_spec.rb b/spec/redis_feature_store_spec.rb index d27cdb39..d5ccfb65 100644 --- a/spec/redis_feature_store_spec.rb +++ b/spec/redis_feature_store_spec.rb @@ -10,12 +10,12 @@ $null_log.level = ::Logger::FATAL -def create_redis_store() - LaunchDarkly::RedisFeatureStore.new(prefix: $my_prefix, logger: $null_log, expiration: 60) +def create_redis_store(opts = {}) + LaunchDarkly::RedisFeatureStore.new(opts.merge({ prefix: $my_prefix, logger: $null_log, expiration: 60 })) end -def create_redis_store_uncached() - LaunchDarkly::RedisFeatureStore.new(prefix: $my_prefix, logger: $null_log, expiration: 0) +def create_redis_store_uncached(opts = {}) + LaunchDarkly::RedisFeatureStore.new(opts.merge({ prefix: $my_prefix, logger: $null_log, expiration: 0 })) end @@ -32,9 +32,10 @@ def create_redis_store_uncached() include_examples "feature_store", method(:create_redis_store_uncached) end - def add_concurrent_modifier(store, other_client, flag, start_version, end_version) + def make_concurrent_modifier_test_hook(other_client, flag, start_version, end_version) + test_hook = Object.new version_counter = start_version - expect(store).to receive(:before_update_transaction) { |base_key, key| + expect(test_hook).to receive(:before_update_transaction) { |base_key, key| if version_counter <= end_version new_flag = flag.clone new_flag[:version] = version_counter @@ -42,18 +43,18 @@ def add_concurrent_modifier(store, other_client, flag, start_version, end_versio version_counter = version_counter + 1 end }.at_least(:once) + test_hook end it "handles upsert race condition against external client with lower version" do - store = create_redis_store other_client = Redis.new({ url: "redis://localhost:6379" }) + flag = { key: "foo", version: 1 } + test_hook = make_concurrent_modifier_test_hook(other_client, flag, 2, 4) + store = create_redis_store({ test_hook: test_hook }) begin - flag = { key: "foo", version: 1 } store.init(LaunchDarkly::FEATURES => { flag[:key] => flag }) - add_concurrent_modifier(store, other_client, flag, 2, 4) - my_ver = { key: "foo", version: 10 } store.upsert(LaunchDarkly::FEATURES, my_ver) result = store.get(LaunchDarkly::FEATURES, flag[:key]) @@ -64,15 +65,14 @@ def add_concurrent_modifier(store, other_client, flag, start_version, end_versio end it "handles upsert race condition against external client with higher version" do - store = create_redis_store other_client = Redis.new({ url: "redis://localhost:6379" }) + flag = { key: "foo", version: 1 } + test_hook = make_concurrent_modifier_test_hook(other_client, flag, 3, 3) + store = create_redis_store({ test_hook: test_hook }) begin - flag = { key: "foo", version: 1 } store.init(LaunchDarkly::FEATURES => { flag[:key] => flag }) - add_concurrent_modifier(store, other_client, flag, 3, 3) - my_ver = { key: "foo", version: 2 } store.upsert(LaunchDarkly::FEATURES, my_ver) result = store.get(LaunchDarkly::FEATURES, flag[:key]) From 4d34bc4811d1ee4ae7c65aa58e5d9c6e0ec0b28a Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 14 Dec 2018 17:53:42 -0800 Subject: [PATCH 048/182] fix method reference --- lib/ldclient-rb/integrations.rb | 6 +++--- lib/ldclient-rb/redis_store.rb | 8 ++++---- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/lib/ldclient-rb/integrations.rb b/lib/ldclient-rb/integrations.rb index 21910b09..8f806fbb 100644 --- a/lib/ldclient-rb/integrations.rb +++ b/lib/ldclient-rb/integrations.rb @@ -9,7 +9,7 @@ module 
Redis # # @return [String] the default Redis URL # - def default_redis_url + def self.default_redis_url 'redis://localhost:6379/0' end @@ -18,7 +18,7 @@ def default_redis_url # # @return [String] the default key prefix # - def default_prefix + def self.default_prefix 'launchdarkly' end @@ -40,7 +40,7 @@ def default_prefix # @option opts [Object] :pool custom connection pool, if desired # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object # - def new_feature_store(opts) + def self.new_feature_store(opts) return RedisFeatureStore.new(opts) end end diff --git a/lib/ldclient-rb/redis_store.rb b/lib/ldclient-rb/redis_store.rb index 99912f5f..6a429ce0 100644 --- a/lib/ldclient-rb/redis_store.rb +++ b/lib/ldclient-rb/redis_store.rb @@ -36,13 +36,13 @@ def initialize(opts) @redis_opts[:url] = opts[:redis_url] end if !@redis_opts.include?(:url) - @redis_opts[:url] = LaunchDarkly::Integrations::Redis.default_redis_url + @redis_opts[:url] = LaunchDarkly::Integrations::Redis::default_redis_url end max_connections = opts[:max_connections] || 16 @pool = opts[:pool] || ConnectionPool.new(size: max_connections) do Redis.new(@redis_opts) end - @prefix = opts[:prefix] || LaunchDarkly::Integrations::Redis.default_prefix + @prefix = opts[:prefix] || LaunchDarkly::Integrations::Redis::default_prefix @logger = opts[:logger] || Config.default_logger @test_hook = opts[:test_hook] # used for unit tests, deliberately undocumented @@ -184,14 +184,14 @@ def initialize(opts = {}) # running at `localhost` with its default port. # def self.default_redis_url - LaunchDarkly::Integrations::Redis.default_redis_url + LaunchDarkly::Integrations::Redis::default_redis_url end # # Default value for the `prefix` constructor parameter. # def self.default_prefix - LaunchDarkly::Integrations::Redis.default_prefix + LaunchDarkly::Integrations::Redis::default_prefix end def get(kind, key) From 19182adce2bfa73a4e7fb9fd7edccd604edc7ac6 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 14 Dec 2018 18:14:04 -0800 Subject: [PATCH 049/182] misc cleanup --- lib/ldclient-rb/integrations.rb | 32 ++-- lib/ldclient-rb/interfaces.rb | 2 +- lib/ldclient-rb/redis_store.rb | 4 +- spec/integrations_helpers_spec.rb | 276 ------------------------------ 4 files changed, 25 insertions(+), 289 deletions(-) delete mode 100644 spec/integrations_helpers_spec.rb diff --git a/lib/ldclient-rb/integrations.rb b/lib/ldclient-rb/integrations.rb index 8f806fbb..4d49d1c4 100644 --- a/lib/ldclient-rb/integrations.rb +++ b/lib/ldclient-rb/integrations.rb @@ -1,7 +1,13 @@ require "concurrent/atomics" module LaunchDarkly + # + # Tools for connecting the LaunchDarkly client to other software. + # module Integrations + # + # Integration with [Redis](https://redis.io/). + # module Redis # # Default value for the `redis_url` option for {new_feature_store}. This points to an instance of @@ -45,7 +51,10 @@ def self.new_feature_store(opts) end end - module Helpers + # + # Support code that may be useful for integrations. + # + module Util # # CachingStoreWrapper is a partial implementation of the {LaunchDarkly::Interfaces::FeatureStore} # pattern that delegates part of its behavior to another object, while providing optional caching @@ -59,10 +68,6 @@ module Helpers class CachingStoreWrapper include LaunchDarkly::Interfaces::FeatureStore - INITED_CACHE_KEY = "$inited" - - private_constant :INITED_CACHE_KEY - # # Creates a new store wrapper instance. 
# @@ -75,8 +80,8 @@ def initialize(core, opts) @core = core expiration_seconds = opts[:expiration] || 15 - capacity = opts[:capacity] || 1000 if expiration_seconds > 0 + capacity = opts[:capacity] || 1000 @cache = ExpiringCache.new(capacity, expiration_seconds) else @cache = nil @@ -146,10 +151,10 @@ def initialized? if @cache.nil? result = @core.initialized_internal? else - result = @cache[INITED_CACHE_KEY] + result = @cache[inited_cache_key] if result.nil? result = @core.initialized_internal? - @cache[INITED_CACHE_KEY] = result + @cache[inited_cache_key] = result end end @@ -163,12 +168,19 @@ def stop private + # We use just one cache for 3 kinds of objects. Individual entities use a key like 'features:my-flag'. + def item_cache_key(kind, key) + kind[:namespace] + ":" + key.to_s + end + + # The result of a call to get_all_internal is cached using the "kind" object as a key. def all_cache_key(kind) kind end - def item_cache_key(kind, key) - kind[:namespace] + ":" + key.to_s + # The result of initialized_internal? is cached using this key. + def inited_cache_key + "$inited" end def item_if_not_deleted(item) diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb index 09e7797d..6226cbe1 100644 --- a/lib/ldclient-rb/interfaces.rb +++ b/lib/ldclient-rb/interfaces.rb @@ -20,7 +20,7 @@ module Interfaces # # The default implementation is {LaunchDarkly::InMemoryFeatureStore}. Several implementations # that use databases can be found in {LaunchDarkly::Integrations}. If you want to write a new - # implementation, see {LaunchDarkly::Integrations::Helpers} for tools that can make this task + # implementation, see {LaunchDarkly::Integrations::Util} for tools that can make this task # simpler. # module FeatureStore diff --git a/lib/ldclient-rb/redis_store.rb b/lib/ldclient-rb/redis_store.rb index 6a429ce0..97cec272 100644 --- a/lib/ldclient-rb/redis_store.rb +++ b/lib/ldclient-rb/redis_store.rb @@ -12,7 +12,7 @@ module LaunchDarkly # installed. Then, create an instance and store it in the `feature_store` property # of your client configuration. # - # @deprecated Use {LaunchDarkly::Integrations::Redis#new_feature_store} instead. This specific + # @deprecated Use the factory method in {LaunchDarkly::Integrations::Redis} instead. This specific # implementation class may change in the future. # class RedisFeatureStore @@ -176,7 +176,7 @@ def initialize(opts = {}) end @core = RedisFeatureStoreCore.new(opts) - @wrapper = LaunchDarkly::Integrations::Helpers::CachingStoreWrapper.new(@core, opts) + @wrapper = LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(@core, opts) end # diff --git a/spec/integrations_helpers_spec.rb b/spec/integrations_helpers_spec.rb deleted file mode 100644 index 24404a72..00000000 --- a/spec/integrations_helpers_spec.rb +++ /dev/null @@ -1,276 +0,0 @@ -require "spec_helper" - -describe LaunchDarkly::Integrations::Helpers::CachingStoreWrapper do - subject { LaunchDarkly::Integrations::Helpers::CachingStoreWrapper } - - THINGS = { namespace: "things" } - - shared_examples "tests" do |cached| - opts = cached ? { expiration: 30 } : { expiration: 0 } - - it "gets item" do - core = MockCore.new - wrapper = subject.new(core, opts) - key = "flag" - itemv1 = { key: key, version: 1 } - itemv2 = { key: key, version: 2 } - - core.force_set(THINGS, itemv1) - expect(wrapper.get(THINGS, key)).to eq itemv1 - - core.force_set(THINGS, itemv2) - expect(wrapper.get(THINGS, key)).to eq (cached ? 
itemv1 : itemv2) # if cached, we will not see the new underlying value yet - end - - it "gets deleted item" do - core = MockCore.new - wrapper = subject.new(core, opts) - key = "flag" - itemv1 = { key: key, version: 1, deleted: true } - itemv2 = { key: key, version: 2, deleted: false } - - core.force_set(THINGS, itemv1) - expect(wrapper.get(THINGS, key)).to eq nil # item is filtered out because deleted is true - - core.force_set(THINGS, itemv2) - expect(wrapper.get(THINGS, key)).to eq (cached ? nil : itemv2) # if cached, we will not see the new underlying value yet - end - - it "gets missing item" do - core = MockCore.new - wrapper = subject.new(core, opts) - key = "flag" - item = { key: key, version: 1 } - - expect(wrapper.get(THINGS, key)).to eq nil - - core.force_set(THINGS, item) - expect(wrapper.get(THINGS, key)).to eq (cached ? nil : item) # the cache can retain a nil result - end - - it "gets all items" do - core = MockCore.new - wrapper = subject.new(core, opts) - item1 = { key: "flag1", version: 1 } - item2 = { key: "flag2", version: 1 } - - core.force_set(THINGS, item1) - core.force_set(THINGS, item2) - expect(wrapper.all(THINGS)).to eq({ item1[:key] => item1, item2[:key] => item2 }) - - core.force_remove(THINGS, item2[:key]) - expect(wrapper.all(THINGS)).to eq (cached ? - { item1[:key] => item1, item2[:key] => item2 } : - { item1[:key] => item1 }) - end - - it "gets all items filtering out deleted items" do - core = MockCore.new - wrapper = subject.new(core, opts) - item1 = { key: "flag1", version: 1 } - item2 = { key: "flag2", version: 1, deleted: true } - - core.force_set(THINGS, item1) - core.force_set(THINGS, item2) - expect(wrapper.all(THINGS)).to eq({ item1[:key] => item1 }) - end - - it "upserts item successfully" do - core = MockCore.new - wrapper = subject.new(core, opts) - key = "flag" - itemv1 = { key: key, version: 1 } - itemv2 = { key: key, version: 2 } - - wrapper.upsert(THINGS, itemv1) - expect(core.data[THINGS][key]).to eq itemv1 - - wrapper.upsert(THINGS, itemv2) - expect(core.data[THINGS][key]).to eq itemv2 - - # if we have a cache, verify that the new item is now cached by writing a different value - # to the underlying data - Get should still return the cached item - if cached - itemv3 = { key: key, version: 3 } - core.force_set(THINGS, itemv3) - end - - expect(wrapper.get(THINGS, key)).to eq itemv2 - end - - it "deletes item" do - core = MockCore.new - wrapper = subject.new(core, opts) - key = "flag" - itemv1 = { key: key, version: 1 } - itemv2 = { key: key, version: 2, deleted: true } - itemv3 = { key: key, version: 3 } - - core.force_set(THINGS, itemv1) - expect(wrapper.get(THINGS, key)).to eq itemv1 - - wrapper.delete(THINGS, key, 2) - expect(core.data[THINGS][key]).to eq itemv2 - - core.force_set(THINGS, itemv3) # make a change that bypasses the cache - - expect(wrapper.get(THINGS, key)).to eq (cached ? 
nil : itemv3) - end - end - - context "cached" do - include_examples "tests", true - - cached_opts = { expiration: 30 } - - it "get uses values from init" do - core = MockCore.new - wrapper = subject.new(core, cached_opts) - item1 = { key: "flag1", version: 1 } - item2 = { key: "flag2", version: 1 } - - wrapper.init({ THINGS => { item1[:key] => item1, item2[:key] => item2 } }) - core.force_remove(THINGS, item1[:key]) - - expect(wrapper.get(THINGS, item1[:key])).to eq item1 - end - - it "get all uses values from init" do - core = MockCore.new - wrapper = subject.new(core, cached_opts) - item1 = { key: "flag1", version: 1 } - item2 = { key: "flag2", version: 1 } - - wrapper.init({ THINGS => { item1[:key] => item1, item2[:key] => item2 } }) - core.force_remove(THINGS, item1[:key]) - - expect(wrapper.all(THINGS)).to eq ({ item1[:key] => item1, item2[:key] => item2 }) - end - - it "upsert doesn't update cache if unsuccessful" do - # This is for an upsert where the data in the store has a higher version. In an uncached - # store, this is just a no-op as far as the wrapper is concerned so there's nothing to - # test here. In a cached store, we need to verify that the cache has been refreshed - # using the data that was found in the store. - core = MockCore.new - wrapper = subject.new(core, cached_opts) - key = "flag" - itemv1 = { key: key, version: 1 } - itemv2 = { key: key, version: 2 } - - wrapper.upsert(THINGS, itemv2) - expect(core.data[THINGS][key]).to eq itemv2 - - wrapper.upsert(THINGS, itemv1) - expect(core.data[THINGS][key]).to eq itemv2 # value in store remains the same - - itemv3 = { key: key, version: 3 } - core.force_set(THINGS, itemv3) # bypasses cache so we can verify that itemv2 is in the cache - expect(wrapper.get(THINGS, key)).to eq itemv2 - end - - it "initialized? 
can cache false result" do - core = MockCore.new - wrapper = subject.new(core, { expiration: 0.2 }) # use a shorter cache TTL for this test - - expect(wrapper.initialized?).to eq false - expect(core.inited_query_count).to eq 1 - - core.inited = true - expect(wrapper.initialized?).to eq false - expect(core.inited_query_count).to eq 1 - - sleep(0.5) - - expect(wrapper.initialized?).to eq true - expect(core.inited_query_count).to eq 2 - - # From this point on it should remain true and the method should not be called - expect(wrapper.initialized?).to eq true - expect(core.inited_query_count).to eq 2 - end - end - - context "uncached" do - include_examples "tests", false - - uncached_opts = { expiration: 0 } - - it "queries internal initialized state only if not already inited" do - core = MockCore.new - wrapper = subject.new(core, uncached_opts) - - expect(wrapper.initialized?).to eq false - expect(core.inited_query_count).to eq 1 - - core.inited = true - expect(wrapper.initialized?).to eq true - expect(core.inited_query_count).to eq 2 - - core.inited = false - expect(wrapper.initialized?).to eq true - expect(core.inited_query_count).to eq 2 - end - - it "does not query internal initialized state if init has been called" do - core = MockCore.new - wrapper = subject.new(core, uncached_opts) - - expect(wrapper.initialized?).to eq false - expect(core.inited_query_count).to eq 1 - - wrapper.init({}) - - expect(wrapper.initialized?).to eq true - expect(core.inited_query_count).to eq 1 - end - end - - class MockCore - def initialize - @data = {} - @inited = false - @inited_query_count = 0 - end - - attr_reader :data - attr_reader :inited_query_count - attr_accessor :inited - - def force_set(kind, item) - @data[kind] = {} if !@data.has_key?(kind) - @data[kind][item[:key]] = item - end - - def force_remove(kind, key) - @data[kind].delete(key) if @data.has_key?(kind) - end - - def init_internal(all_data) - @data = all_data - @inited = true - end - - def get_internal(kind, key) - items = @data[kind] - items.nil? ? nil : items[key] - end - - def get_all_internal(kind) - @data[kind] - end - - def upsert_internal(kind, item) - @data[kind] = {} if !@data.has_key?(kind) - old_item = @data[kind][item[:key]] - return old_item if !old_item.nil? && old_item[:version] >= item[:version] - @data[kind][item[:key]] = item - item - end - - def initialized_internal? - @inited_query_count = @inited_query_count + 1 - @inited - end - end -end From 5941638a33c7ecf703a565eabab0584871da8670 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 14 Dec 2018 18:23:17 -0800 Subject: [PATCH 050/182] comment --- lib/ldclient-rb/integrations.rb | 3 +++ 1 file changed, 3 insertions(+) diff --git a/lib/ldclient-rb/integrations.rb b/lib/ldclient-rb/integrations.rb index 4d49d1c4..2df5e04c 100644 --- a/lib/ldclient-rb/integrations.rb +++ b/lib/ldclient-rb/integrations.rb @@ -240,6 +240,9 @@ def get_all_internal(kind) # parameter if the update succeeded, or the previously existing entity in the store if the # update failed; this is used for the caching logic). # + # Note that FeatureStoreCore does not have a `delete` method. This is because {CachingStoreWrapper} + # implements `delete` by simply calling `upsert` with an item whose `:deleted` property is true. 
+ # # @param kind [Object] the kind of entity to add or update # @param item [Hash] the entity to add or update # @return [Hash] the entity as it now exists in the store after the update From b4cf610105cba3f2e540d5c933b4826bb8a85b77 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 14 Dec 2018 18:25:04 -0800 Subject: [PATCH 051/182] re-add test file --- spec/integrations_util_spec.rb | 276 +++++++++++++++++++++++++++++++++ 1 file changed, 276 insertions(+) create mode 100644 spec/integrations_util_spec.rb diff --git a/spec/integrations_util_spec.rb b/spec/integrations_util_spec.rb new file mode 100644 index 00000000..e7890802 --- /dev/null +++ b/spec/integrations_util_spec.rb @@ -0,0 +1,276 @@ +require "spec_helper" + +describe LaunchDarkly::Integrations::Util::CachingStoreWrapper do + subject { LaunchDarkly::Integrations::Util::CachingStoreWrapper } + + THINGS = { namespace: "things" } + + shared_examples "tests" do |cached| + opts = cached ? { expiration: 30 } : { expiration: 0 } + + it "gets item" do + core = MockCore.new + wrapper = subject.new(core, opts) + key = "flag" + itemv1 = { key: key, version: 1 } + itemv2 = { key: key, version: 2 } + + core.force_set(THINGS, itemv1) + expect(wrapper.get(THINGS, key)).to eq itemv1 + + core.force_set(THINGS, itemv2) + expect(wrapper.get(THINGS, key)).to eq (cached ? itemv1 : itemv2) # if cached, we will not see the new underlying value yet + end + + it "gets deleted item" do + core = MockCore.new + wrapper = subject.new(core, opts) + key = "flag" + itemv1 = { key: key, version: 1, deleted: true } + itemv2 = { key: key, version: 2, deleted: false } + + core.force_set(THINGS, itemv1) + expect(wrapper.get(THINGS, key)).to eq nil # item is filtered out because deleted is true + + core.force_set(THINGS, itemv2) + expect(wrapper.get(THINGS, key)).to eq (cached ? nil : itemv2) # if cached, we will not see the new underlying value yet + end + + it "gets missing item" do + core = MockCore.new + wrapper = subject.new(core, opts) + key = "flag" + item = { key: key, version: 1 } + + expect(wrapper.get(THINGS, key)).to eq nil + + core.force_set(THINGS, item) + expect(wrapper.get(THINGS, key)).to eq (cached ? nil : item) # the cache can retain a nil result + end + + it "gets all items" do + core = MockCore.new + wrapper = subject.new(core, opts) + item1 = { key: "flag1", version: 1 } + item2 = { key: "flag2", version: 1 } + + core.force_set(THINGS, item1) + core.force_set(THINGS, item2) + expect(wrapper.all(THINGS)).to eq({ item1[:key] => item1, item2[:key] => item2 }) + + core.force_remove(THINGS, item2[:key]) + expect(wrapper.all(THINGS)).to eq (cached ? 
+ { item1[:key] => item1, item2[:key] => item2 } : + { item1[:key] => item1 }) + end + + it "gets all items filtering out deleted items" do + core = MockCore.new + wrapper = subject.new(core, opts) + item1 = { key: "flag1", version: 1 } + item2 = { key: "flag2", version: 1, deleted: true } + + core.force_set(THINGS, item1) + core.force_set(THINGS, item2) + expect(wrapper.all(THINGS)).to eq({ item1[:key] => item1 }) + end + + it "upserts item successfully" do + core = MockCore.new + wrapper = subject.new(core, opts) + key = "flag" + itemv1 = { key: key, version: 1 } + itemv2 = { key: key, version: 2 } + + wrapper.upsert(THINGS, itemv1) + expect(core.data[THINGS][key]).to eq itemv1 + + wrapper.upsert(THINGS, itemv2) + expect(core.data[THINGS][key]).to eq itemv2 + + # if we have a cache, verify that the new item is now cached by writing a different value + # to the underlying data - Get should still return the cached item + if cached + itemv3 = { key: key, version: 3 } + core.force_set(THINGS, itemv3) + end + + expect(wrapper.get(THINGS, key)).to eq itemv2 + end + + it "deletes item" do + core = MockCore.new + wrapper = subject.new(core, opts) + key = "flag" + itemv1 = { key: key, version: 1 } + itemv2 = { key: key, version: 2, deleted: true } + itemv3 = { key: key, version: 3 } + + core.force_set(THINGS, itemv1) + expect(wrapper.get(THINGS, key)).to eq itemv1 + + wrapper.delete(THINGS, key, 2) + expect(core.data[THINGS][key]).to eq itemv2 + + core.force_set(THINGS, itemv3) # make a change that bypasses the cache + + expect(wrapper.get(THINGS, key)).to eq (cached ? nil : itemv3) + end + end + + context "cached" do + include_examples "tests", true + + cached_opts = { expiration: 30 } + + it "get uses values from init" do + core = MockCore.new + wrapper = subject.new(core, cached_opts) + item1 = { key: "flag1", version: 1 } + item2 = { key: "flag2", version: 1 } + + wrapper.init({ THINGS => { item1[:key] => item1, item2[:key] => item2 } }) + core.force_remove(THINGS, item1[:key]) + + expect(wrapper.get(THINGS, item1[:key])).to eq item1 + end + + it "get all uses values from init" do + core = MockCore.new + wrapper = subject.new(core, cached_opts) + item1 = { key: "flag1", version: 1 } + item2 = { key: "flag2", version: 1 } + + wrapper.init({ THINGS => { item1[:key] => item1, item2[:key] => item2 } }) + core.force_remove(THINGS, item1[:key]) + + expect(wrapper.all(THINGS)).to eq ({ item1[:key] => item1, item2[:key] => item2 }) + end + + it "upsert doesn't update cache if unsuccessful" do + # This is for an upsert where the data in the store has a higher version. In an uncached + # store, this is just a no-op as far as the wrapper is concerned so there's nothing to + # test here. In a cached store, we need to verify that the cache has been refreshed + # using the data that was found in the store. + core = MockCore.new + wrapper = subject.new(core, cached_opts) + key = "flag" + itemv1 = { key: key, version: 1 } + itemv2 = { key: key, version: 2 } + + wrapper.upsert(THINGS, itemv2) + expect(core.data[THINGS][key]).to eq itemv2 + + wrapper.upsert(THINGS, itemv1) + expect(core.data[THINGS][key]).to eq itemv2 # value in store remains the same + + itemv3 = { key: key, version: 3 } + core.force_set(THINGS, itemv3) # bypasses cache so we can verify that itemv2 is in the cache + expect(wrapper.get(THINGS, key)).to eq itemv2 + end + + it "initialized? 
can cache false result" do + core = MockCore.new + wrapper = subject.new(core, { expiration: 0.2 }) # use a shorter cache TTL for this test + + expect(wrapper.initialized?).to eq false + expect(core.inited_query_count).to eq 1 + + core.inited = true + expect(wrapper.initialized?).to eq false + expect(core.inited_query_count).to eq 1 + + sleep(0.5) + + expect(wrapper.initialized?).to eq true + expect(core.inited_query_count).to eq 2 + + # From this point on it should remain true and the method should not be called + expect(wrapper.initialized?).to eq true + expect(core.inited_query_count).to eq 2 + end + end + + context "uncached" do + include_examples "tests", false + + uncached_opts = { expiration: 0 } + + it "queries internal initialized state only if not already inited" do + core = MockCore.new + wrapper = subject.new(core, uncached_opts) + + expect(wrapper.initialized?).to eq false + expect(core.inited_query_count).to eq 1 + + core.inited = true + expect(wrapper.initialized?).to eq true + expect(core.inited_query_count).to eq 2 + + core.inited = false + expect(wrapper.initialized?).to eq true + expect(core.inited_query_count).to eq 2 + end + + it "does not query internal initialized state if init has been called" do + core = MockCore.new + wrapper = subject.new(core, uncached_opts) + + expect(wrapper.initialized?).to eq false + expect(core.inited_query_count).to eq 1 + + wrapper.init({}) + + expect(wrapper.initialized?).to eq true + expect(core.inited_query_count).to eq 1 + end + end + + class MockCore + def initialize + @data = {} + @inited = false + @inited_query_count = 0 + end + + attr_reader :data + attr_reader :inited_query_count + attr_accessor :inited + + def force_set(kind, item) + @data[kind] = {} if !@data.has_key?(kind) + @data[kind][item[:key]] = item + end + + def force_remove(kind, key) + @data[kind].delete(key) if @data.has_key?(kind) + end + + def init_internal(all_data) + @data = all_data + @inited = true + end + + def get_internal(kind, key) + items = @data[kind] + items.nil? ? nil : items[key] + end + + def get_all_internal(kind) + @data[kind] + end + + def upsert_internal(kind, item) + @data[kind] = {} if !@data.has_key?(kind) + old_item = @data[kind][item[:key]] + return old_item if !old_item.nil? && old_item[:version] >= item[:version] + @data[kind][item[:key]] = item + item + end + + def initialized_internal? 
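+ # counts how many times the wrapper has queried the underlying initialized state; the specs above assert on this counter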
+ @inited_query_count = @inited_query_count + 1 + @inited + end + end +end From 3f9ef3b9166832d2f3bb551d348822697d4a7d38 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 14 Dec 2018 19:05:59 -0800 Subject: [PATCH 052/182] misc cleanup --- lib/ldclient-rb.rb | 3 +- lib/ldclient-rb/impl.rb | 10 + .../impl/integrations/redis_impl.rb | 153 ++++++++++ lib/ldclient-rb/integrations.rb | 262 +----------------- lib/ldclient-rb/integrations/redis.rb | 48 ++++ .../integrations/util/store_wrapper.rb | 222 +++++++++++++++ lib/ldclient-rb/interfaces.rb | 3 + lib/ldclient-rb/redis_store.rb | 153 +--------- .../store_wrapper_spec.rb} | 0 9 files changed, 454 insertions(+), 400 deletions(-) create mode 100644 lib/ldclient-rb/impl.rb create mode 100644 lib/ldclient-rb/impl/integrations/redis_impl.rb create mode 100644 lib/ldclient-rb/integrations/redis.rb create mode 100644 lib/ldclient-rb/integrations/util/store_wrapper.rb rename spec/{integrations_util_spec.rb => integrations/store_wrapper_spec.rb} (100%) diff --git a/lib/ldclient-rb.rb b/lib/ldclient-rb.rb index a1d7ffd9..e355a304 100644 --- a/lib/ldclient-rb.rb +++ b/lib/ldclient-rb.rb @@ -17,7 +17,6 @@ require "ldclient-rb/non_blocking_thread_pool" require "ldclient-rb/event_summarizer" require "ldclient-rb/events" -require "ldclient-rb/redis_store" -require "ldclient-rb/integrations" require "ldclient-rb/requestor" require "ldclient-rb/file_data_source" +require "ldclient-rb/integrations" diff --git a/lib/ldclient-rb/impl.rb b/lib/ldclient-rb/impl.rb new file mode 100644 index 00000000..85079baf --- /dev/null +++ b/lib/ldclient-rb/impl.rb @@ -0,0 +1,10 @@ + +module LaunchDarkly + # + # Low-level implementation classes. Everything in this module should be considered non-public + # and subject to change with any release. + # + module Impl + # code is in ldclient-rb/impl/ + end +end diff --git a/lib/ldclient-rb/impl/integrations/redis_impl.rb b/lib/ldclient-rb/impl/integrations/redis_impl.rb new file mode 100644 index 00000000..325b936e --- /dev/null +++ b/lib/ldclient-rb/impl/integrations/redis_impl.rb @@ -0,0 +1,153 @@ +require "concurrent/atomics" +require "json" + +require "ldclient-rb/integrations/util/store_wrapper" +require "ldclient-rb/redis_store" # eventually that file should be moved inside this one + +module LaunchDarkly + module Impl + module Integrations + module Redis + # + # Internal implementation of the Redis feature store, intended to be used with CachingStoreWrapper. 
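+ # It exposes the FeatureStoreCore operations (init_internal, get_internal, get_all_internal, upsert_internal, initialized_internal?, stop) and leaves all caching to the wrapper.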
+ # + class RedisFeatureStoreCore + begin + require "redis" + require "connection_pool" + REDIS_ENABLED = true + rescue ScriptError, StandardError + REDIS_ENABLED = false + end + + def initialize(opts) + if !REDIS_ENABLED + raise RuntimeError.new("can't use Redis feature store because one of these gems is missing: redis, connection_pool") + end + + @redis_opts = opts[:redis_opts] || Hash.new + if opts[:redis_url] + @redis_opts[:url] = opts[:redis_url] + end + if !@redis_opts.include?(:url) + @redis_opts[:url] = LaunchDarkly::Integrations::Redis::default_redis_url + end + max_connections = opts[:max_connections] || 16 + @pool = opts[:pool] || ConnectionPool.new(size: max_connections) do + ::Redis.new(@redis_opts) + end + @prefix = opts[:prefix] || LaunchDarkly::Integrations::Redis::default_prefix + @logger = opts[:logger] || Config.default_logger + @test_hook = opts[:test_hook] # used for unit tests, deliberately undocumented + + @stopped = Concurrent::AtomicBoolean.new(false) + + with_connection do |redis| + @logger.info("RedisFeatureStore: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} \ + and prefix: #{@prefix}") + end + end + + def init_internal(all_data) + count = 0 + with_connection do |redis| + all_data.each do |kind, items| + redis.multi do |multi| + multi.del(items_key(kind)) + count = count + items.count + items.each { |key, item| + redis.hset(items_key(kind), key, item.to_json) + } + end + end + end + @logger.info { "RedisFeatureStore: initialized with #{count} items" } + end + + def get_internal(kind, key) + with_connection do |redis| + get_redis(redis, kind, key) + end + end + + def get_all_internal(kind) + fs = {} + with_connection do |redis| + hashfs = redis.hgetall(items_key(kind)) + hashfs.each do |k, json_item| + f = JSON.parse(json_item, symbolize_names: true) + fs[k.to_sym] = f + end + end + fs + end + + def upsert_internal(kind, new_item) + base_key = items_key(kind) + key = new_item[:key] + try_again = true + final_item = new_item + while try_again + try_again = false + with_connection do |redis| + redis.watch(base_key) do + old_item = get_redis(redis, kind, key) + before_update_transaction(base_key, key) + if old_item.nil? || old_item[:version] < new_item[:version] + result = redis.multi do |multi| + multi.hset(base_key, key, new_item.to_json) + end + if result.nil? + @logger.debug { "RedisFeatureStore: concurrent modification detected, retrying" } + try_again = true + end + else + final_item = old_item + action = new_item[:deleted] ? "delete" : "update" + @logger.warn { "RedisFeatureStore: attempted to #{action} #{key} version: #{old_item[:version]} \ + in '#{kind[:namespace]}' with a version that is the same or older: #{new_item[:version]}" } + end + redis.unwatch + end + end + end + final_item + end + + def initialized_internal? + with_connection { |redis| redis.exists(items_key(FEATURES)) } + end + + def stop + if @stopped.make_true + @pool.shutdown { |redis| redis.close } + end + end + + private + + def before_update_transaction(base_key, key) + @test_hook.before_update_transaction(base_key, key) if !@test_hook.nil? + end + + def items_key(kind) + @prefix + ":" + kind[:namespace] + end + + def cache_key(kind, key) + kind[:namespace] + ":" + key.to_s + end + + def with_connection + @pool.with { |redis| yield(redis) } + end + + def get_redis(redis, kind, key) + json_item = redis.hget(items_key(kind), key) + json_item.nil? ? 
nil : JSON.parse(json_item, symbolize_names: true) + end + end + end + end + end +end diff --git a/lib/ldclient-rb/integrations.rb b/lib/ldclient-rb/integrations.rb index 2df5e04c..02b2d435 100644 --- a/lib/ldclient-rb/integrations.rb +++ b/lib/ldclient-rb/integrations.rb @@ -1,4 +1,4 @@ -require "concurrent/atomics" +require "ldclient-rb/integrations/redis" module LaunchDarkly # @@ -8,265 +8,19 @@ module Integrations # # Integration with [Redis](https://redis.io/). # + # @since 5.5.0 + # module Redis - # - # Default value for the `redis_url` option for {new_feature_store}. This points to an instance of - # Redis running at `localhost` with its default port. - # - # @return [String] the default Redis URL - # - def self.default_redis_url - 'redis://localhost:6379/0' - end - - # - # Default value for the `prefix` option for {new_feature_store}. - # - # @return [String] the default key prefix - # - def self.default_prefix - 'launchdarkly' - end - - # - # Creates a Redis-backed persistent feature store. - # - # To use this method, you must first have the `redis` and `connection-pool` gems installed. Then, - # put the object returned by this method into the `feature_store` property of your - # client configuration ({LaunchDarkly::Config}). - # - # @param opts [Hash] the configuration options - # @option opts [String] :redis_url (default_redis_url) URL of the Redis instance (shortcut for omitting `redis_opts`) - # @option opts [Hash] :redis_opts options to pass to the Redis constructor (if you want to specify more than just `redis_url`) - # @option opts [String] :prefix (default_prefix) namespace prefix to add to all hash keys used by LaunchDarkly - # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` - # @option opts [Integer] :max_connections size of the Redis connection pool - # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no local caching - # @option opts [Integer] :capacity (1000) maximum number of items in the cache - # @option opts [Object] :pool custom connection pool, if desired - # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object - # - def self.new_feature_store(opts) - return RedisFeatureStore.new(opts) - end + # code is in ldclient-rb/impl/integrations/redis_impl end # - # Support code that may be useful for integrations. + # Support code that may be helpful in creating integrations. + # + # @since 5.5.0 # module Util - # - # CachingStoreWrapper is a partial implementation of the {LaunchDarkly::Interfaces::FeatureStore} - # pattern that delegates part of its behavior to another object, while providing optional caching - # behavior and other logic that would otherwise be repeated in every feature store implementation. - # This makes it easier to create new database integrations by implementing only the database-specific - # logic. - # - # The mixin {FeatureStoreCore} describes the methods that need to be supported by the inner - # implementation object. - # - class CachingStoreWrapper - include LaunchDarkly::Interfaces::FeatureStore - - # - # Creates a new store wrapper instance. 
- # - # @param core [Object] an object that implements the {FeatureStoreCore} methods - # @param opts [Hash] a hash that may include cache-related options; all others will be ignored - # @option opts [Float] :expiration_seconds (15) cache TTL; zero means no caching - # @option opts [Integer] :capacity (1000) maximum number of items in the cache - # - def initialize(core, opts) - @core = core - - expiration_seconds = opts[:expiration] || 15 - if expiration_seconds > 0 - capacity = opts[:capacity] || 1000 - @cache = ExpiringCache.new(capacity, expiration_seconds) - else - @cache = nil - end - - @inited = Concurrent::AtomicBoolean.new(false) - end - - def init(all_data) - @core.init_internal(all_data) - @inited.make_true - - if !@cache.nil? - @cache.clear - all_data.each do |kind, items| - @cache[kind] = items_if_not_deleted(items) - items.each do |key, item| - @cache[item_cache_key(kind, key)] = [item] - end - end - end - end - - def get(kind, key) - if !@cache.nil? - cache_key = item_cache_key(kind, key) - cached = @cache[cache_key] # note, item entries in the cache are wrapped in an array so we can cache nil values - return item_if_not_deleted(cached[0]) if !cached.nil? - end - - item = @core.get_internal(kind, key) - - if !@cache.nil? - @cache[cache_key] = [item] - end - - item_if_not_deleted(item) - end - - def all(kind) - if !@cache.nil? - items = @cache[all_cache_key(kind)] - return items if !items.nil? - end - - items = items_if_not_deleted(@core.get_all_internal(kind)) - @cache[all_cache_key(kind)] = items if !@cache.nil? - items - end - - def upsert(kind, item) - new_state = @core.upsert_internal(kind, item) - - if !@cache.nil? - @cache[item_cache_key(kind, item[:key])] = [new_state] - @cache.delete(all_cache_key(kind)) - end - end - - def delete(kind, key, version) - upsert(kind, { key: key, version: version, deleted: true }) - end - - def initialized? - return true if @inited.value - - if @cache.nil? - result = @core.initialized_internal? - else - result = @cache[inited_cache_key] - if result.nil? - result = @core.initialized_internal? - @cache[inited_cache_key] = result - end - end - - @inited.make_true if result - result - end - - def stop - @core.stop - end - - private - - # We use just one cache for 3 kinds of objects. Individual entities use a key like 'features:my-flag'. - def item_cache_key(kind, key) - kind[:namespace] + ":" + key.to_s - end - - # The result of a call to get_all_internal is cached using the "kind" object as a key. - def all_cache_key(kind) - kind - end - - # The result of initialized_internal? is cached using this key. - def inited_cache_key - "$inited" - end - - def item_if_not_deleted(item) - (item.nil? || item[:deleted]) ? nil : item - end - - def items_if_not_deleted(items) - items.select { |key, item| !item[:deleted] } - end - end - - # - # This module describes the methods that you must implement on your own object in order to - # use {CachingStoreWrapper}. - # - module FeatureStoreCore - # - # Initializes the store. This is the same as {LaunchDarkly::Interfaces::FeatureStore#init}, - # but the wrapper will take care of updating the cache if caching is enabled. - # - # @param all_data [Hash] a hash where each key is one of the data kind objects, and each - # value is in turn a hash of string keys to entities - # - def init_internal(all_data) - end - - # - # Retrieves a single entity. This is the same as {LaunchDarkly::Interfaces::FeatureStore#get} - # except that 1. 
the wrapper will take care of filtering out deleted entities by checking the - # `:deleted` property, so you can just return exactly what was in the data store, and 2. the - # wrapper will take care of checking and updating the cache if caching is enabled. - # - # @param kind [Object] the kind of entity to get - # @param key [String] the unique key of the entity to get - # @return [Hash] the entity; nil if the key was not found - # - def get_internal(kind, key) - end - - # - # Retrieves all entities of the specified kind. This is the same as {LaunchDarkly::Interfaces::FeatureStore#all} - # except that 1. the wrapper will take care of filtering out deleted entities by checking the - # `:deleted` property, so you can just return exactly what was in the data store, and 2. the - # wrapper will take care of checking and updating the cache if caching is enabled. - # - # @param kind [Object] the kind of entity to get - # @return [Hash] a hash where each key is the entity's `:key` property and each value - # is the entity - # - def get_all_internal(kind) - end - - # - # Attempts to add or update an entity. This is the same as {LaunchDarkly::Interfaces::FeatureStore#upsert} - # except that 1. the wrapper will take care of updating the cache if caching is enabled, and 2. - # the method is expected to return the final state of the entity (i.e. either the `item` - # parameter if the update succeeded, or the previously existing entity in the store if the - # update failed; this is used for the caching logic). - # - # Note that FeatureStoreCore does not have a `delete` method. This is because {CachingStoreWrapper} - # implements `delete` by simply calling `upsert` with an item whose `:deleted` property is true. - # - # @param kind [Object] the kind of entity to add or update - # @param item [Hash] the entity to add or update - # @return [Hash] the entity as it now exists in the store after the update - # - def upsert_internal(kind, item) - end - - # - # Checks whether this store has been initialized. This is the same as - # {LaunchDarkly::Interfaces::FeatureStore#initialized?} except that there is less of a concern - # for efficiency, because the wrapper will use caching and memoization in order to call the method - # as little as possible. - # - # @return [Boolean] true if the store is in an initialized state - # - def initialized_internal? - end - - # - # Performs any necessary cleanup to shut down the store when the client is being shut down. - # - def stop - end - end + # code is in ldclient-rb/integrations/util/ end end end diff --git a/lib/ldclient-rb/integrations/redis.rb b/lib/ldclient-rb/integrations/redis.rb new file mode 100644 index 00000000..54221f76 --- /dev/null +++ b/lib/ldclient-rb/integrations/redis.rb @@ -0,0 +1,48 @@ +require "ldclient-rb/impl/integrations/redis_impl" + +module LaunchDarkly + module Integrations + module Redis + # + # Default value for the `redis_url` option for {new_feature_store}. This points to an instance of + # Redis running at `localhost` with its default port. + # + # @return [String] the default Redis URL + # + def self.default_redis_url + 'redis://localhost:6379/0' + end + + # + # Default value for the `prefix` option for {new_feature_store}. + # + # @return [String] the default key prefix + # + def self.default_prefix + 'launchdarkly' + end + + # + # Creates a Redis-backed persistent feature store. + # + # To use this method, you must first have the `redis` and `connection-pool` gems installed. 
Then, + # put the object returned by this method into the `feature_store` property of your + # client configuration ({LaunchDarkly::Config}). + # + # @param opts [Hash] the configuration options + # @option opts [String] :redis_url (default_redis_url) URL of the Redis instance (shortcut for omitting `redis_opts`) + # @option opts [Hash] :redis_opts options to pass to the Redis constructor (if you want to specify more than just `redis_url`) + # @option opts [String] :prefix (default_prefix) namespace prefix to add to all hash keys used by LaunchDarkly + # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` + # @option opts [Integer] :max_connections size of the Redis connection pool + # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no local caching + # @option opts [Integer] :capacity (1000) maximum number of items in the cache + # @option opts [Object] :pool custom connection pool, if desired + # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object + # + def self.new_feature_store(opts) + return RedisFeatureStore.new(opts) + end + end + end +end diff --git a/lib/ldclient-rb/integrations/util/store_wrapper.rb b/lib/ldclient-rb/integrations/util/store_wrapper.rb new file mode 100644 index 00000000..58ecb2c4 --- /dev/null +++ b/lib/ldclient-rb/integrations/util/store_wrapper.rb @@ -0,0 +1,222 @@ +require "concurrent/atomics" + +require "ldclient-rb/expiring_cache" + +module LaunchDarkly + module Integrations + module Util + # + # CachingStoreWrapper is a partial implementation of the {LaunchDarkly::Interfaces::FeatureStore} + # pattern that delegates part of its behavior to another object, while providing optional caching + # behavior and other logic that would otherwise be repeated in every feature store implementation. + # This makes it easier to create new database integrations by implementing only the database-specific + # logic. + # + # The mixin {FeatureStoreCore} describes the methods that need to be supported by the inner + # implementation object. + # + class CachingStoreWrapper + include LaunchDarkly::Interfaces::FeatureStore + + # + # Creates a new store wrapper instance. + # + # @param core [Object] an object that implements the {FeatureStoreCore} methods + # @param opts [Hash] a hash that may include cache-related options; all others will be ignored + # @option opts [Float] :expiration_seconds (15) cache TTL; zero means no caching + # @option opts [Integer] :capacity (1000) maximum number of items in the cache + # + def initialize(core, opts) + @core = core + + expiration_seconds = opts[:expiration] || 15 + if expiration_seconds > 0 + capacity = opts[:capacity] || 1000 + @cache = ExpiringCache.new(capacity, expiration_seconds) + else + @cache = nil + end + + @inited = Concurrent::AtomicBoolean.new(false) + end + + def init(all_data) + @core.init_internal(all_data) + @inited.make_true + + if !@cache.nil? + @cache.clear + all_data.each do |kind, items| + @cache[kind] = items_if_not_deleted(items) + items.each do |key, item| + @cache[item_cache_key(kind, key)] = [item] + end + end + end + end + + def get(kind, key) + if !@cache.nil? + cache_key = item_cache_key(kind, key) + cached = @cache[cache_key] # note, item entries in the cache are wrapped in an array so we can cache nil values + return item_if_not_deleted(cached[0]) if !cached.nil? + end + + item = @core.get_internal(kind, key) + + if !@cache.nil? 
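+ # cache the item we just fetched; it is wrapped in a single-element array so that a nil (not found) result can be cached too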
+ @cache[cache_key] = [item] + end + + item_if_not_deleted(item) + end + + def all(kind) + if !@cache.nil? + items = @cache[all_cache_key(kind)] + return items if !items.nil? + end + + items = items_if_not_deleted(@core.get_all_internal(kind)) + @cache[all_cache_key(kind)] = items if !@cache.nil? + items + end + + def upsert(kind, item) + new_state = @core.upsert_internal(kind, item) + + if !@cache.nil? + @cache[item_cache_key(kind, item[:key])] = [new_state] + @cache.delete(all_cache_key(kind)) + end + end + + def delete(kind, key, version) + upsert(kind, { key: key, version: version, deleted: true }) + end + + def initialized? + return true if @inited.value + + if @cache.nil? + result = @core.initialized_internal? + else + result = @cache[inited_cache_key] + if result.nil? + result = @core.initialized_internal? + @cache[inited_cache_key] = result + end + end + + @inited.make_true if result + result + end + + def stop + @core.stop + end + + private + + # We use just one cache for 3 kinds of objects. Individual entities use a key like 'features:my-flag'. + def item_cache_key(kind, key) + kind[:namespace] + ":" + key.to_s + end + + # The result of a call to get_all_internal is cached using the "kind" object as a key. + def all_cache_key(kind) + kind + end + + # The result of initialized_internal? is cached using this key. + def inited_cache_key + "$inited" + end + + def item_if_not_deleted(item) + (item.nil? || item[:deleted]) ? nil : item + end + + def items_if_not_deleted(items) + items.select { |key, item| !item[:deleted] } + end + end + + # + # This module describes the methods that you must implement on your own object in order to + # use {CachingStoreWrapper}. + # + module FeatureStoreCore + # + # Initializes the store. This is the same as {LaunchDarkly::Interfaces::FeatureStore#init}, + # but the wrapper will take care of updating the cache if caching is enabled. + # + # @param all_data [Hash] a hash where each key is one of the data kind objects, and each + # value is in turn a hash of string keys to entities + # + def init_internal(all_data) + end + + # + # Retrieves a single entity. This is the same as {LaunchDarkly::Interfaces::FeatureStore#get} + # except that 1. the wrapper will take care of filtering out deleted entities by checking the + # `:deleted` property, so you can just return exactly what was in the data store, and 2. the + # wrapper will take care of checking and updating the cache if caching is enabled. + # + # @param kind [Object] the kind of entity to get + # @param key [String] the unique key of the entity to get + # @return [Hash] the entity; nil if the key was not found + # + def get_internal(kind, key) + end + + # + # Retrieves all entities of the specified kind. This is the same as {LaunchDarkly::Interfaces::FeatureStore#all} + # except that 1. the wrapper will take care of filtering out deleted entities by checking the + # `:deleted` property, so you can just return exactly what was in the data store, and 2. the + # wrapper will take care of checking and updating the cache if caching is enabled. + # + # @param kind [Object] the kind of entity to get + # @return [Hash] a hash where each key is the entity's `:key` property and each value + # is the entity + # + def get_all_internal(kind) + end + + # + # Attempts to add or update an entity. This is the same as {LaunchDarkly::Interfaces::FeatureStore#upsert} + # except that 1. the wrapper will take care of updating the cache if caching is enabled, and 2. 
+ # the method is expected to return the final state of the entity (i.e. either the `item` + # parameter if the update succeeded, or the previously existing entity in the store if the + # update failed; this is used for the caching logic). + # + # Note that FeatureStoreCore does not have a `delete` method. This is because {CachingStoreWrapper} + # implements `delete` by simply calling `upsert` with an item whose `:deleted` property is true. + # + # @param kind [Object] the kind of entity to add or update + # @param item [Hash] the entity to add or update + # @return [Hash] the entity as it now exists in the store after the update + # + def upsert_internal(kind, item) + end + + # + # Checks whether this store has been initialized. This is the same as + # {LaunchDarkly::Interfaces::FeatureStore#initialized?} except that there is less of a concern + # for efficiency, because the wrapper will use caching and memoization in order to call the method + # as little as possible. + # + # @return [Boolean] true if the store is in an initialized state + # + def initialized_internal? + end + + # + # Performs any necessary cleanup to shut down the store when the client is being shut down. + # + def stop + end + end + end + end +end diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb index 6226cbe1..510e1636 100644 --- a/lib/ldclient-rb/interfaces.rb +++ b/lib/ldclient-rb/interfaces.rb @@ -1,5 +1,8 @@ module LaunchDarkly + # + # Mixins that define the required methods of various pluggable components used by the client. + # module Interfaces # # Mixin that defines the required methods of a feature store implementation. The LaunchDarkly diff --git a/lib/ldclient-rb/redis_store.rb b/lib/ldclient-rb/redis_store.rb index 97cec272..32a9507d 100644 --- a/lib/ldclient-rb/redis_store.rb +++ b/lib/ldclient-rb/redis_store.rb @@ -1,6 +1,9 @@ require "concurrent/atomics" require "json" +require "ldclient-rb/interfaces" +require "ldclient-rb/impl/integrations/redis_impl" + module LaunchDarkly # # An implementation of the LaunchDarkly client's feature store that uses a Redis @@ -16,146 +19,12 @@ module LaunchDarkly # implementation class may change in the future. # class RedisFeatureStore - begin - require "redis" - require "connection_pool" - REDIS_ENABLED = true - rescue ScriptError, StandardError - REDIS_ENABLED = false - end - include LaunchDarkly::Interfaces::FeatureStore - # - # Internal implementation of the Redis feature store. We put a CachingStoreWrapper around this. 
- # - class RedisFeatureStoreCore - def initialize(opts) - @redis_opts = opts[:redis_opts] || Hash.new - if opts[:redis_url] - @redis_opts[:url] = opts[:redis_url] - end - if !@redis_opts.include?(:url) - @redis_opts[:url] = LaunchDarkly::Integrations::Redis::default_redis_url - end - max_connections = opts[:max_connections] || 16 - @pool = opts[:pool] || ConnectionPool.new(size: max_connections) do - Redis.new(@redis_opts) - end - @prefix = opts[:prefix] || LaunchDarkly::Integrations::Redis::default_prefix - @logger = opts[:logger] || Config.default_logger - @test_hook = opts[:test_hook] # used for unit tests, deliberately undocumented - - @stopped = Concurrent::AtomicBoolean.new(false) - - with_connection do |redis| - @logger.info("RedisFeatureStore: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} \ - and prefix: #{@prefix}") - end - end - - def init_internal(all_data) - count = 0 - with_connection do |redis| - all_data.each do |kind, items| - redis.multi do |multi| - multi.del(items_key(kind)) - count = count + items.count - items.each { |key, item| - redis.hset(items_key(kind), key, item.to_json) - } - end - end - end - @logger.info { "RedisFeatureStore: initialized with #{count} items" } - end - - def get_internal(kind, key) - with_connection do |redis| - get_redis(redis, kind, key) - end - end - - def get_all_internal(kind) - fs = {} - with_connection do |redis| - hashfs = redis.hgetall(items_key(kind)) - hashfs.each do |k, json_item| - f = JSON.parse(json_item, symbolize_names: true) - fs[k.to_sym] = f - end - end - fs - end - - def upsert_internal(kind, new_item) - base_key = items_key(kind) - key = new_item[:key] - try_again = true - final_item = new_item - while try_again - try_again = false - with_connection do |redis| - redis.watch(base_key) do - old_item = get_redis(redis, kind, key) - before_update_transaction(base_key, key) - if old_item.nil? || old_item[:version] < new_item[:version] - result = redis.multi do |multi| - multi.hset(base_key, key, new_item.to_json) - end - if result.nil? - @logger.debug { "RedisFeatureStore: concurrent modification detected, retrying" } - try_again = true - end - else - final_item = old_item - action = new_item[:deleted] ? "delete" : "update" - @logger.warn { "RedisFeatureStore: attempted to #{action} #{key} version: #{old_item[:version]} \ -in '#{kind[:namespace]}' with a version that is the same or older: #{new_item[:version]}" } - end - redis.unwatch - end - end - end - final_item - end - - def initialized_internal? - with_connection { |redis| redis.exists(items_key(FEATURES)) } - end - - def stop - if @stopped.make_true - @pool.shutdown { |redis| redis.close } - end - end - - private - - # exposed for testing - def before_update_transaction(base_key, key) - @test_hook.before_update_transaction(base_key, key) if !@test_hook.nil? - end - - def items_key(kind) - @prefix + ":" + kind[:namespace] - end - - def cache_key(kind, key) - kind[:namespace] + ":" + key.to_s - end - - def with_connection - @pool.with { |redis| yield(redis) } - end - - def get_redis(redis, kind, key) - json_item = redis.hget(items_key(kind), key) - json_item.nil? ? nil : JSON.parse(json_item, symbolize_names: true) - end - end - - private_constant :RedisFeatureStoreCore + # Note that this class is now just a facade around CachingStoreWrapper, which is in turn delegating + # to RedisFeatureStoreCore where the actual database logic is. 
This class was retained for historical + # reasons, so that existing code can still call RedisFeatureStore.new. In the future, we will migrate + # away from exposing these concrete classes and use factory methods instead. # # Constructor for a RedisFeatureStore instance. @@ -171,12 +40,8 @@ def get_redis(redis, kind, key) # @option opts [Object] :pool custom connection pool, if desired # def initialize(opts = {}) - if !REDIS_ENABLED - raise RuntimeError.new("can't use RedisFeatureStore because one of these gems is missing: redis, connection_pool") - end - - @core = RedisFeatureStoreCore.new(opts) - @wrapper = LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(@core, opts) + core = LaunchDarkly::Impl::Integrations::Redis::RedisFeatureStoreCore.new(opts) + @wrapper = LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts) end # diff --git a/spec/integrations_util_spec.rb b/spec/integrations/store_wrapper_spec.rb similarity index 100% rename from spec/integrations_util_spec.rb rename to spec/integrations/store_wrapper_spec.rb From fa831f9a3fc6db3bf1eabff4030eaa13ae11d03c Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 14 Dec 2018 19:18:27 -0800 Subject: [PATCH 053/182] misc cleanup --- lib/ldclient-rb/impl/integrations/redis_impl.rb | 3 --- lib/ldclient-rb/integrations.rb | 1 + lib/ldclient-rb/integrations/redis.rb | 2 +- 3 files changed, 2 insertions(+), 4 deletions(-) diff --git a/lib/ldclient-rb/impl/integrations/redis_impl.rb b/lib/ldclient-rb/impl/integrations/redis_impl.rb index 325b936e..497b01c5 100644 --- a/lib/ldclient-rb/impl/integrations/redis_impl.rb +++ b/lib/ldclient-rb/impl/integrations/redis_impl.rb @@ -1,9 +1,6 @@ require "concurrent/atomics" require "json" -require "ldclient-rb/integrations/util/store_wrapper" -require "ldclient-rb/redis_store" # eventually that file should be moved inside this one - module LaunchDarkly module Impl module Integrations diff --git a/lib/ldclient-rb/integrations.rb b/lib/ldclient-rb/integrations.rb index 02b2d435..c48074a0 100644 --- a/lib/ldclient-rb/integrations.rb +++ b/lib/ldclient-rb/integrations.rb @@ -1,4 +1,5 @@ require "ldclient-rb/integrations/redis" +require "ldclient-rb/integrations/util/store_wrapper" module LaunchDarkly # diff --git a/lib/ldclient-rb/integrations/redis.rb b/lib/ldclient-rb/integrations/redis.rb index 54221f76..b81097c6 100644 --- a/lib/ldclient-rb/integrations/redis.rb +++ b/lib/ldclient-rb/integrations/redis.rb @@ -1,4 +1,4 @@ -require "ldclient-rb/impl/integrations/redis_impl" +require "ldclient-rb/redis_store" # eventually we will just refer to impl/integrations/redis_impl directly module LaunchDarkly module Integrations From ea68da433cc5eaeaeac8c557364c94a20a21d93f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 15 Dec 2018 10:33:52 -0800 Subject: [PATCH 054/182] initial DynamoDB implementation --- .circleci/config.yml | 11 + ldclient-rb.gemspec | 1 + .../impl/integrations/dynamodb_impl.rb | 231 ++++++++++++++++++ lib/ldclient-rb/integrations.rb | 10 + lib/ldclient-rb/integrations/dynamodb.rb | 31 +++ .../dynamodb_feature_store_spec.rb | 77 ++++++ 6 files changed, 361 insertions(+) create mode 100644 lib/ldclient-rb/impl/integrations/dynamodb_impl.rb create mode 100644 lib/ldclient-rb/integrations/dynamodb.rb create mode 100644 spec/integrations/dynamodb_feature_store_spec.rb diff --git a/.circleci/config.yml b/.circleci/config.yml index df9dac51..f19ae7bc 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -34,26 +34,31 @@ jobs: docker: - image: 
circleci/ruby:2.2.9-jessie - image: redis + - image: amazon/dynamodb-local test-2.3: <<: *ruby-docker-template docker: - image: circleci/ruby:2.3.6-jessie - image: redis + - image: amazon/dynamodb-local test-2.4: <<: *ruby-docker-template docker: - image: circleci/ruby:2.4.4-stretch - image: redis + - image: amazon/dynamodb-local test-2.5: <<: *ruby-docker-template docker: - image: circleci/ruby:2.5.1-stretch - image: redis + - image: amazon/dynamodb-local test-jruby-9.2: <<: *ruby-docker-template docker: - image: circleci/jruby:9-jdk - image: redis + - image: amazon/dynamodb-local # The following very slow job uses an Ubuntu container to run the Ruby versions that # CircleCI doesn't provide Docker images for. @@ -63,8 +68,11 @@ jobs: environment: - RUBIES: "jruby-9.1.17.0" steps: + - run: sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" - run: sudo apt-get -q update - run: sudo apt-get -qy install redis-server + - run: sudo apt-cache policy docker-ce + - run: sudo apt-get -qy install docker-ce - checkout - run: name: install all Ruby versions @@ -84,6 +92,9 @@ jobs: bundle install; mv Gemfile.lock "Gemfile.lock.$i" done + - run: + command: docker run -p 8000:8000 amazon/dynamodb-local + background: true - run: name: run tests for all versions shell: /bin/bash -leo pipefail diff --git a/ldclient-rb.gemspec b/ldclient-rb.gemspec index 0b8f4f9d..8b1f4cc7 100644 --- a/ldclient-rb.gemspec +++ b/ldclient-rb.gemspec @@ -21,6 +21,7 @@ Gem::Specification.new do |spec| spec.require_paths = ["lib"] spec.extensions = 'ext/mkrf_conf.rb' + spec.add_development_dependency "aws-sdk-dynamodb", "~> 1.18" spec.add_development_dependency "bundler", "~> 1.7" spec.add_development_dependency "rspec", "~> 3.2" spec.add_development_dependency "codeclimate-test-reporter", "~> 0" diff --git a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb new file mode 100644 index 00000000..8eb1dd2a --- /dev/null +++ b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb @@ -0,0 +1,231 @@ +require "concurrent/atomics" +require "json" + +module LaunchDarkly + module Impl + module Integrations + module DynamoDB + # + # Internal implementation of the DynamoDB feature store, intended to be used with CachingStoreWrapper. + # + class DynamoDBFeatureStoreCore + begin + require "aws-sdk-dynamodb" + AWS_SDK_ENABLED = true + rescue ScriptError, StandardError + begin + require "aws-sdk" + AWS_SDK_ENABLED = true + rescue ScriptError, StandardError + AWS_SDK_ENABLED = false + end + end + + PARTITION_KEY = "namespace" + SORT_KEY = "key" + + VERSION_ATTRIBUTE = "version" + ITEM_JSON_ATTRIBUTE = "item" + + def initialize(table_name, opts) + if !AWS_SDK_ENABLED + raise RuntimeError.new("can't use DynamoDB feature store without the aws-sdk or aws-sdk-dynamodb gem") + end + + @table_name = table_name + @prefix = opts[:prefix] + @logger = opts[:logger] || Config.default_logger + + @stopped = Concurrent::AtomicBoolean.new(false) + + if !opts[:existing_client].nil? + @client = opts[:existing_client] + else + @client = Aws::DynamoDB::Client.new(opts[:dynamodb_opts]) + end + + @logger.info("DynamoDBFeatureStore: using DynamoDB table \"#{table_name}\"") + end + + def init_internal(all_data) + # Start by reading the existing keys; we will later delete any of these that weren't in all_data. 
+ unused_old_keys = read_existing_keys(all_data.keys) + + requests = [] + num_items = 0 + + # Insert or update every provided item + all_data.each do |kind, items| + items.values.each do |item| + requests.push({ put_request: { item: marshal_item(kind, item) } }) + unused_old_keys.delete([ namespace_for_kind(kind), item[:key] ]) + num_items = num_items + 1 + end + end + + # Now delete any previously existing items whose keys were not in the current data + unused_old_keys.each do |tuple| + del_item = make_keys_hash(tuple[0], tuple[1]) + requests.push({ delete_request: { key: del_item } }) + end + + # Now set the special key that we check in initialized_internal? + inited_item = make_keys_hash(inited_key, inited_key) + requests.push({ put_request: { item: inited_item } }) + + DynamoDBUtil.batch_write_requests(@client, @table_name, requests) + + @logger.info { "Initialized table #{@table_name} with #{num_items} items" } + end + + def get_internal(kind, key) + resp = get_item_by_keys(namespace_for_kind(kind), key) + unmarshal_item(resp.item) + end + + def get_all_internal(kind) + items_out = {} + req = make_query_for_kind(kind) + while true + resp = @client.query(req) + resp.items.each do |item| + item_out = unmarshal_item(item) + items_out[item_out[:key].to_sym] = item_out + end + break if resp.last_evaluated_key.nil? || resp.last_evaluated_key.length == 0 + req.exclusive_start_key = resp.last_evaluated_key + end + items_out + end + + def upsert_internal(kind, new_item) + encoded_item = marshal_item(kind, new_item) + begin + @client.put_item({ + table_name: @table_name, + item: encoded_item, + condition_expression: "attribute_not_exists(#namespace) or attribute_not_exists(#key) or :version > #version", + expression_attribute_names: { + "#namespace" => PARTITION_KEY, + "#key" => SORT_KEY, + "#version" => VERSION_ATTRIBUTE + }, + expression_attribute_values: { + ":version" => new_item[:version] + } + }) + new_item + rescue Aws::DynamoDB::Errors::ConditionalCheckFailedException + # The item was not updated because there's a newer item in the database. + # We must now read the item that's in the database and return it, so CachingStoreWrapper can cache it. + get_internal(kind, new_item[:key]) + end + end + + def initialized_internal? + resp = get_item_by_keys(inited_key, inited_key) + !resp.item.nil? && resp.item.length > 0 + end + + def stop + # AWS client doesn't seem to have a close method + end + + private + + def prefixed_namespace(base_str) + (@prefix.nil? || @prefix == "") ? 
base_str : "#{@prefix}:#{base_str}" + end + + def namespace_for_kind(kind) + prefixed_namespace(kind[:namespace]) + end + + def inited_key + prefixed_namespace("$inited") + end + + def make_keys_hash(namespace, key) + { + PARTITION_KEY => namespace, + SORT_KEY => key + } + end + + def make_query_for_kind(kind) + { + table_name: @table_name, + consistent_read: true, + key_conditions: { + PARTITION_KEY => { + comparison_operator: "EQ", + attribute_value_list: [ namespace_for_kind(kind) ] + } + } + } + end + + def get_item_by_keys(namespace, key) + @client.get_item({ + table_name: @table_name, + key: make_keys_hash(namespace, key) + }) + end + + def read_existing_keys(kinds) + keys = Set.new + kinds.each do |kind| + req = make_query_for_kind(kind).merge({ + projection_expression: "#namespace, #key", + expression_attribute_names: { + "#namespace" => PARTITION_KEY, + "#key" => SORT_KEY + } + }) + while true + resp = @client.query(req) + resp.items.each do |item| + namespace = item[PARTITION_KEY] + key = item[SORT_KEY] + keys.add([ namespace, key ]) + end + break if resp.last_evaluated_key.nil? || resp.last_evaluated_key.length == 0 + req.exclusive_start_key = resp.last_evaluated_key + end + end + keys + end + + def marshal_item(kind, item) + make_keys_hash(namespace_for_kind(kind), item[:key]).merge({ + VERSION_ATTRIBUTE => item[:version], + ITEM_JSON_ATTRIBUTE => item.to_json + }) + end + + def unmarshal_item(item) + return nil if item.nil? || item.length == 0 + json_attr = item[ITEM_JSON_ATTRIBUTE] + raise RuntimeError.new("DynamoDB map did not contain expected item string") if json_attr.nil? + JSON.parse(json_attr, symbolize_names: true) + end + end + + class DynamoDBUtil + # + # Calls client.batch_write_item as many times as necessary to submit all of the given requests. + # The requests array is consumed. + # + def self.batch_write_requests(client, table, requests) + batch_size = 25 + while true + chunk = requests.shift(batch_size) + break if chunk.empty? + client.batch_write_item({ request_items: { table => chunk } }) + end + end + end + end + end + end +end diff --git a/lib/ldclient-rb/integrations.rb b/lib/ldclient-rb/integrations.rb index c48074a0..029c4243 100644 --- a/lib/ldclient-rb/integrations.rb +++ b/lib/ldclient-rb/integrations.rb @@ -1,3 +1,4 @@ +require "ldclient-rb/integrations/dynamodb" require "ldclient-rb/integrations/redis" require "ldclient-rb/integrations/util/store_wrapper" @@ -6,6 +7,15 @@ module LaunchDarkly # Tools for connecting the LaunchDarkly client to other software. # module Integrations + # + # Integration with [DynamoDB](https://aws.amazon.com/dynamodb/). + # + # @since 5.5.0 + # + module DynamoDB + # code is in ldclient-rb/impl/integrations/dynamodb_impl + end + # # Integration with [Redis](https://redis.io/). # diff --git a/lib/ldclient-rb/integrations/dynamodb.rb b/lib/ldclient-rb/integrations/dynamodb.rb new file mode 100644 index 00000000..553f54e9 --- /dev/null +++ b/lib/ldclient-rb/integrations/dynamodb.rb @@ -0,0 +1,31 @@ +require "ldclient-rb/impl/integrations/dynamodb_impl" +require "ldclient-rb/integrations/util/store_wrapper" + +module LaunchDarkly + module Integrations + module DynamoDB + # + # Creates a DynamoDB-backed persistent feature store. + # + # To use this method, you must first have the `aws_sdk` gem installed. Then, + # put the object returned by this method into the `feature_store` property of your + # client configuration ({LaunchDarkly::Config}). 
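+      #
+      # For illustration, a hypothetical wiring might look like the following sketch. The table
+      # name, region, and SDK key are placeholders, and the table is assumed to already exist:
+      #
+      #     store = LaunchDarkly::Integrations::DynamoDB.new_feature_store("my-table",
+      #       { dynamodb_opts: { region: "us-east-1" } })
+      #     config = LaunchDarkly::Config.new(feature_store: store)
+      #     client = LaunchDarkly::LDClient.new("YOUR_SDK_KEY", config)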
+ # + # @param opts [Hash] the configuration options + # @option opts [String] :redis_url (default_redis_url) URL of the Redis instance (shortcut for omitting `redis_opts`) + # @option opts [Hash] :redis_opts options to pass to the Redis constructor (if you want to specify more than just `redis_url`) + # @option opts [String] :prefix (default_prefix) namespace prefix to add to all hash keys used by LaunchDarkly + # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` + # @option opts [Integer] :max_connections size of the Redis connection pool + # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no local caching + # @option opts [Integer] :capacity (1000) maximum number of items in the cache + # @option opts [Object] :pool custom connection pool, if desired + # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object + # + def self.new_feature_store(table_name, opts) + core = LaunchDarkly::Impl::Integrations::DynamoDB::DynamoDBFeatureStoreCore.new(table_name, opts) + return LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts) + end + end + end +end diff --git a/spec/integrations/dynamodb_feature_store_spec.rb b/spec/integrations/dynamodb_feature_store_spec.rb new file mode 100644 index 00000000..4a0e3cbf --- /dev/null +++ b/spec/integrations/dynamodb_feature_store_spec.rb @@ -0,0 +1,77 @@ +require "feature_store_spec_base" +require "aws-sdk-dynamodb" +require "spec_helper" + + +$table_name = 'LD_DYNAMODB_TEST_TABLE' +$endpoint = 'http://localhost:8000' +$my_prefix = 'testprefix' +$null_log = ::Logger.new($stdout) +$null_log.level = ::Logger::FATAL + +$dynamodb_opts = { + credentials: Aws::Credentials.new("key", "secret"), + region: "us-east-1", + endpoint: $endpoint +} + +$base_opts = { + dynamodb_opts: $dynamodb_opts, + prefix: $my_prefix, + logger: $null_log +} + +def create_dynamodb_store(opts = {}) + LaunchDarkly::Integrations::DynamoDB::new_feature_store($table_name, + opts.merge($base_opts).merge({ expiration: 60 })) +end + +def create_dynamodb_store_uncached(opts = {}) + LaunchDarkly::Integrations::DynamoDB::new_feature_store($table_name, + opts.merge($base_opts).merge({ expiration: 0 })) +end + +def create_table_if_necessary + client = create_test_client + begin + client.describe_table({ table_name: $table_name }) + return # no error, table exists + rescue Blahbhbhba + # fall through to code below - we'll create the table + end + + req = { + table_name: $table_name, + key_schema: [ + { attribute_name: "namespace", key_type: "HASH" }, + { attribute_name: "key", key_type: "RANGE" } + ], + attribute_definitions: [ + { attribute_name: "namespace", attribute_type: "S" }, + { attribute_name: "key", attribute_type: "S" } + ] + } + client.create_table(req) + + # When DynamoDB creates a table, it may not be ready to use immediately +end + +def create_test_client + Aws::DynamoDB::Client.new($dynamodb_opts) +end + + +describe "DynamoDB feature store" do + + # These tests will all fail if there isn't a local DynamoDB instance running. 
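+  # (One way to provide such an instance, mirroring the CI configuration earlier in this series,
+  # is the amazon/dynamodb-local Docker image: `docker run -p 8000:8000 amazon/dynamodb-local`.
+  # It listens on port 8000, which is what $endpoint above assumes.)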
+ + create_table_if_necessary + + context "with local cache" do + include_examples "feature_store", method(:create_dynamodb_store) + end + + context "without local cache" do + include_examples "feature_store", method(:create_dynamodb_store_uncached) + end +end From bde227450dee5c868e099fbc5c20de7c80b272ee Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 15 Dec 2018 10:56:38 -0800 Subject: [PATCH 055/182] fix exception name --- spec/integrations/dynamodb_feature_store_spec.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spec/integrations/dynamodb_feature_store_spec.rb b/spec/integrations/dynamodb_feature_store_spec.rb index 4a0e3cbf..98e32ed6 100644 --- a/spec/integrations/dynamodb_feature_store_spec.rb +++ b/spec/integrations/dynamodb_feature_store_spec.rb @@ -36,7 +36,7 @@ def create_table_if_necessary begin client.describe_table({ table_name: $table_name }) return # no error, table exists - rescue Blahbhbhba + rescue Aws::DynamoDB::Errors::ResourceNotFoundException # fall through to code below - we'll create the table end From 4e493172c97a5cbf745176167d3b4a5aec637e45 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 15 Dec 2018 10:59:17 -0800 Subject: [PATCH 056/182] fix test setup --- spec/integrations/dynamodb_feature_store_spec.rb | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/spec/integrations/dynamodb_feature_store_spec.rb b/spec/integrations/dynamodb_feature_store_spec.rb index 98e32ed6..38104fb3 100644 --- a/spec/integrations/dynamodb_feature_store_spec.rb +++ b/spec/integrations/dynamodb_feature_store_spec.rb @@ -49,7 +49,11 @@ def create_table_if_necessary attribute_definitions: [ { attribute_name: "namespace", attribute_type: "S" }, { attribute_name: "key", attribute_type: "S" } - ] + ], + provisioned_throughput: { + read_capacity_units: 1, + write_capacity_units: 1 + } } client.create_table(req) From c71bbec59a1b76f933c21f9acc7d55860d1b4303 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 15 Dec 2018 11:32:30 -0800 Subject: [PATCH 057/182] comments --- lib/ldclient-rb/integrations.rb | 6 ++++++ lib/ldclient-rb/integrations/dynamodb.rb | 6 +++--- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/lib/ldclient-rb/integrations.rb b/lib/ldclient-rb/integrations.rb index 029c4243..bfaed2eb 100644 --- a/lib/ldclient-rb/integrations.rb +++ b/lib/ldclient-rb/integrations.rb @@ -10,6 +10,9 @@ module Integrations # # Integration with [DynamoDB](https://aws.amazon.com/dynamodb/). # + # Note that in order to use this integration, you must first install one of the AWS SDK gems: either + # `aws-sdk-dynamodb`, or the full `aws-sdk`. + # # @since 5.5.0 # module DynamoDB @@ -19,6 +22,9 @@ module DynamoDB # # Integration with [Redis](https://redis.io/). # + # Note that in order to use this integration, you must first install the `redis` and `connection-pool` + # gems. + # # @since 5.5.0 # module Redis diff --git a/lib/ldclient-rb/integrations/dynamodb.rb b/lib/ldclient-rb/integrations/dynamodb.rb index 553f54e9..66d3b583 100644 --- a/lib/ldclient-rb/integrations/dynamodb.rb +++ b/lib/ldclient-rb/integrations/dynamodb.rb @@ -7,9 +7,9 @@ module DynamoDB # # Creates a DynamoDB-backed persistent feature store. # - # To use this method, you must first have the `aws_sdk` gem installed. Then, - # put the object returned by this method into the `feature_store` property of your - # client configuration ({LaunchDarkly::Config}). 
+ # To use this method, you must first install one of the AWS SDK gems: either `aws-sdk-dynamodb`, or + # the full `aws-sdk`. Then, put the object returned by this method into the `feature_store` property + # of your client configuration ({LaunchDarkly::Config}). # # @param opts [Hash] the configuration options # @option opts [String] :redis_url (default_redis_url) URL of the Redis instance (shortcut for omitting `redis_opts`) From cfe3b188df3ef64139310bc73dce03e9891c5883 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 15 Dec 2018 11:48:04 -0800 Subject: [PATCH 058/182] readme --- README.md | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index ead2bb6b..43819554 100644 --- a/README.md +++ b/README.md @@ -121,6 +121,11 @@ else end ``` +Database integrations +--------------------- + +Feature flag data can be kept in a persistent store using Redis or DynamoDB. These adapters are implemented in the `LaunchDarkly::Integrations::Redis` and `LaunchDarkly::Integrations::DynamoDB` modules; to use them, call the `new_feature_store` method in the module, and put the returned object in the `feature_store` property of your client configuration. See the [source code](https://github.com/launchdarkly/ruby-client-private/tree/master/lib/ldclient-rb/integrations) and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store) for more information. + Using flag data from a file --------------------------- For testing purposes, the SDK can be made to read feature flag state from a file or files instead of connecting to LaunchDarkly. See [`file_data_source.rb`](https://github.com/launchdarkly/ruby-client/blob/master/lib/ldclient-rb/file_data_source.rb) for more details. @@ -153,9 +158,9 @@ About LaunchDarkly * [JavaScript](http://docs.launchdarkly.com/docs/js-sdk-reference "LaunchDarkly JavaScript SDK") * [PHP](http://docs.launchdarkly.com/docs/php-sdk-reference "LaunchDarkly PHP SDK") * [Python](http://docs.launchdarkly.com/docs/python-sdk-reference "LaunchDarkly Python SDK") - * [Python Twisted](http://docs.launchdarkly.com/docs/python-twisted-sdk-reference "LaunchDarkly Python Twisted SDK") * [Go](http://docs.launchdarkly.com/docs/go-sdk-reference "LaunchDarkly Go SDK") * [Node.JS](http://docs.launchdarkly.com/docs/node-sdk-reference "LaunchDarkly Node SDK") + * [Electron](http://docs.launchdarkly.com/docs/electron-sdk-reference "LaunchDarkly Electron SDK") * [.NET](http://docs.launchdarkly.com/docs/dotnet-sdk-reference "LaunchDarkly .Net SDK") * [Ruby](http://docs.launchdarkly.com/docs/ruby-sdk-reference "LaunchDarkly Ruby SDK") * [iOS](http://docs.launchdarkly.com/docs/ios-sdk-reference "LaunchDarkly iOS SDK") From 69cf890825ab41a5529242b0f4cb90f46bb81a5b Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 15 Dec 2018 12:08:41 -0800 Subject: [PATCH 059/182] fix doc comment --- lib/ldclient-rb/integrations/dynamodb.rb | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/lib/ldclient-rb/integrations/dynamodb.rb b/lib/ldclient-rb/integrations/dynamodb.rb index 66d3b583..c9ded019 100644 --- a/lib/ldclient-rb/integrations/dynamodb.rb +++ b/lib/ldclient-rb/integrations/dynamodb.rb @@ -12,14 +12,12 @@ module DynamoDB # of your client configuration ({LaunchDarkly::Config}). 
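+      #
+      # For illustration, a hypothetical use of the `:existing_client` option described below; the
+      # region is a placeholder, and in practice the client object would usually be one that the
+      # application has already constructed for its own use:
+      #
+      #     my_client = Aws::DynamoDB::Client.new(region: "us-east-1")
+      #     store = LaunchDarkly::Integrations::DynamoDB.new_feature_store("my-table",
+      #       { existing_client: my_client })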
# # @param opts [Hash] the configuration options - # @option opts [String] :redis_url (default_redis_url) URL of the Redis instance (shortcut for omitting `redis_opts`) - # @option opts [Hash] :redis_opts options to pass to the Redis constructor (if you want to specify more than just `redis_url`) - # @option opts [String] :prefix (default_prefix) namespace prefix to add to all hash keys used by LaunchDarkly + # @option opts [Hash] :dynamodb_opts options to pass to the DynamoDB client constructor (ignored if you specify `:existing_client`) + # @option opts [Object] :existing_client an already-constructed DynamoDB client for the feature store to use + # @option opts [String] :prefix namespace prefix to add to all keys used by LaunchDarkly # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` - # @option opts [Integer] :max_connections size of the Redis connection pool # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache - # @option opts [Object] :pool custom connection pool, if desired # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object # def self.new_feature_store(table_name, opts) From 321eb6eeb247764437233f8478b5ac3c1f9e6492 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 15 Dec 2018 14:45:13 -0800 Subject: [PATCH 060/182] greatly improve documentation comments --- lib/ldclient-rb.rb | 7 + lib/ldclient-rb/cache_store.rb | 1 + lib/ldclient-rb/config.rb | 291 ++++++++++++------ lib/ldclient-rb/evaluation.rb | 64 +++- lib/ldclient-rb/event_summarizer.rb | 3 + lib/ldclient-rb/events.rb | 16 + lib/ldclient-rb/expiring_cache.rb | 1 + lib/ldclient-rb/file_data_source.rb | 8 +- lib/ldclient-rb/flags_state.rb | 5 +- lib/ldclient-rb/impl.rb | 2 + .../integrations/util/store_wrapper.rb | 3 + lib/ldclient-rb/interfaces.rb | 42 +++ lib/ldclient-rb/ldclient.rb | 116 ++++--- lib/ldclient-rb/memoized_value.rb | 2 + lib/ldclient-rb/newrelic.rb | 1 + lib/ldclient-rb/non_blocking_thread_pool.rb | 6 +- lib/ldclient-rb/polling.rb | 1 + lib/ldclient-rb/requestor.rb | 3 +- lib/ldclient-rb/simple_lru_cache.rb | 1 + lib/ldclient-rb/stream.rb | 8 + lib/ldclient-rb/user_filter.rb | 1 + lib/ldclient-rb/util.rb | 1 + lib/sse_client/sse_client.rb | 7 + scripts/gendocs.sh | 9 + 24 files changed, 446 insertions(+), 153 deletions(-) create mode 100755 scripts/gendocs.sh diff --git a/lib/ldclient-rb.rb b/lib/ldclient-rb.rb index e355a304..e5477ecb 100644 --- a/lib/ldclient-rb.rb +++ b/lib/ldclient-rb.rb @@ -1,3 +1,10 @@ + +# +# Namespace for the LaunchDarkly Ruby SDK. +# +module LaunchDarkly +end + require "ldclient-rb/version" require "ldclient-rb/interfaces" require "ldclient-rb/util" diff --git a/lib/ldclient-rb/cache_store.rb b/lib/ldclient-rb/cache_store.rb index 0677da65..a0a50fbf 100644 --- a/lib/ldclient-rb/cache_store.rb +++ b/lib/ldclient-rb/cache_store.rb @@ -7,6 +7,7 @@ module LaunchDarkly # # @see https://github.com/plataformatec/faraday-http-cache # @see https://github.com/ruby-concurrency/thread_safe + # @private # class ThreadSafeMemoryStore # diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index dc89d30a..e16e998a 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -8,66 +8,35 @@ module LaunchDarkly # # class Config + # rubocop:disable Metrics/AbcSize, Metrics/PerceivedComplexity + # # Constructor for creating custom LaunchDarkly configurations. 
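+    #
+    # As an illustrative sketch only, a custom configuration might be built like this; the values
+    # shown are arbitrary, and any option that is omitted falls back to the defaults described
+    # later in this class:
+    #
+    #     config = LaunchDarkly::Config.new(
+    #       capacity: 20000,                    # hold up to 20,000 events before flushing
+    #       flush_interval: 10,                 # flush the event buffer every 10 seconds
+    #       private_attribute_names: ["email"]  # never send this attribute's value to LaunchDarkly
+    #     )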
# # @param opts [Hash] the configuration options - # @option opts [Logger] :logger A logger to use for messages from the - # LaunchDarkly client. Defaults to the Rails logger in a Rails - # environment, or stdout otherwise. - # @option opts [String] :base_uri ("https://app.launchdarkly.com") The base - # URL for the LaunchDarkly server. Most users should use the default value. - # @option opts [String] :stream_uri ("https://stream.launchdarkly.com") The - # URL for the LaunchDarkly streaming events server. Most users should use the default value. - # @option opts [String] :events_uri ("https://events.launchdarkly.com") The - # URL for the LaunchDarkly events server. Most users should use the default value. - # @option opts [Integer] :capacity (10000) The capacity of the events - # buffer. The client buffers up to this many events in memory before - # flushing. If the capacity is exceeded before the buffer is flushed, - # events will be discarded. - # @option opts [Float] :flush_interval (30) The number of seconds between - # flushes of the event buffer. - # @option opts [Float] :read_timeout (10) The read timeout for network - # connections in seconds. - # @option opts [Float] :connect_timeout (2) The connect timeout for network - # connections in seconds. - # @option opts [Object] :cache_store A cache store for the Faraday HTTP caching - # library. Defaults to the Rails cache in a Rails environment, or a - # thread-safe in-memory store otherwise. - # @option opts [Object] :feature_store A store for feature flags and related data. Defaults to an in-memory - # cache, or you can use RedisFeatureStore. - # @option opts [Boolean] :use_ldd (false) Whether you are using the LaunchDarkly relay proxy in - # daemon mode. In this configuration, the client will not use a streaming connection to listen - # for updates, but instead will get feature state from a Redis instance. The `stream` and - # `poll_interval` options will be ignored if this option is set to true. - # @option opts [Boolean] :offline (false) Whether the client should be initialized in - # offline mode. In offline mode, default values are returned for all flags and no - # remote network requests are made. - # @option opts [Float] :poll_interval (30) The number of seconds between polls for flag updates - # if streaming is off. - # @option opts [Boolean] :stream (true) Whether or not the streaming API should be used to receive flag updates. - # Streaming should only be disabled on the advice of LaunchDarkly support. - # @option opts [Boolean] all_attributes_private (false) If true, all user attributes (other than the key) - # will be private, not just the attributes specified in `private_attribute_names`. - # @option opts [Array] :private_attribute_names Marks a set of attribute names private. Any users sent to - # LaunchDarkly with this configuration active will have attributes with these names removed. - # @option opts [Boolean] :send_events (true) Whether or not to send events back to LaunchDarkly. - # This differs from `offline` in that it affects only the sending of client-side events, not - # streaming or polling for events from the server. - # @option opts [Integer] :user_keys_capacity (1000) The number of user keys that the event processor - # can remember at any one time, so that duplicate user details will not be sent in analytics events. - # @option opts [Float] :user_keys_flush_interval (300) The interval in seconds at which the event - # processor will reset its set of known user keys. 
- # @option opts [Boolean] :inline_users_in_events (false) Whether to include full user details in every - # analytics event. By default, events will only include the user key, except for one "index" event - # that provides the full details for the user. - # @option opts [Object] :update_processor (DEPRECATED) An object that will receive feature flag data from - # LaunchDarkly. Defaults to either the streaming or the polling processor, can be customized for tests. - # @option opts [Object] :update_processor_factory A function that takes the SDK and configuration object - # as parameters, and returns an object that can obtain feature flag data and put it into the feature - # store. Defaults to creating either the streaming or the polling processor, can be customized for tests. - # @return [type] [description] - # rubocop:disable Metrics/AbcSize, Metrics/PerceivedComplexity + # @option opts [Logger] :logger See {#logger}. + # @option opts [String] :base_uri ("https://app.launchdarkly.com") See {#base_uri}. + # @option opts [String] :stream_uri ("https://stream.launchdarkly.com") See {#stream_uri}. + # @option opts [String] :events_uri ("https://events.launchdarkly.com") See {#events_uri}. + # @option opts [Integer] :capacity (10000) See {#capacity}. + # @option opts [Float] :flush_interval (30) See {#flush_interval}. + # @option opts [Float] :read_timeout (10) See {#read_timeout}. + # @option opts [Float] :connect_timeout (2) See {#connect_timeout}. + # @option opts [Object] :cache_store See {#cache_store}. + # @option opts [Object] :feature_store See {#feature_store}. + # @option opts [Boolean] :use_ldd (false) See {#use_ldd?}. + # @option opts [Boolean] :offline (false) See {#offline?}. + # @option opts [Float] :poll_interval (30) See {#poll_interval}. + # @option opts [Boolean] :stream (true) See {#stream?}. + # @option opts [Boolean] all_attributes_private (false) See {#all_attributes_private}. + # @option opts [Array] :private_attribute_names See {#private_attribute_names}. + # @option opts [Boolean] :send_events (true) See {#send_events}. + # @option opts [Integer] :user_keys_capacity (1000) See {#user_keys_capacity}. + # @option opts [Float] :user_keys_flush_interval (300) See {#user_keys_flush_interval}. + # @option opts [Boolean] :inline_users_in_events (false) See {#inline_users_in_events}. + # @option opts [Object] :update_processor See {#update_processor}. + # @option opts [Object] :update_processor_factory See {#update_processor_factory}. + # def initialize(opts = {}) @base_uri = (opts[:base_uri] || Config.default_base_uri).chomp("/") @stream_uri = (opts[:stream_uri] || Config.default_stream_uri).chomp("/") @@ -95,43 +64,56 @@ def initialize(opts = {}) end # - # The base URL for the LaunchDarkly server. + # The base URL for the LaunchDarkly server. This is configurable mainly for testing + # purposes; most users should use the default value. + # @return [String] # - # @return [String] The configured base URL for the LaunchDarkly server. attr_reader :base_uri # - # The base URL for the LaunchDarkly streaming server. + # The base URL for the LaunchDarkly streaming server. This is configurable mainly for testing + # purposes; most users should use the default value. + # @return [String] # - # @return [String] The configured base URL for the LaunchDarkly streaming server. attr_reader :stream_uri # - # The base URL for the LaunchDarkly events server. + # The base URL for the LaunchDarkly events server. 
This is configurable mainly for testing + # purposes; most users should use the default value. + # @return [String] # - # @return [String] The configured base URL for the LaunchDarkly events server. attr_reader :events_uri # # Whether streaming mode should be enabled. Streaming mode asynchronously updates - # feature flags in real-time using server-sent events. + # feature flags in real-time using server-sent events. Streaming is enabled by default, and + # should only be disabled on the advice of LaunchDarkly support. + # @return [Boolean] # - # @return [Boolean] True if streaming mode should be enabled def stream? @stream end # - # Whether to use the LaunchDarkly relay proxy in daemon mode. In this mode, we do - # not use polling or streaming to get feature flag updates from the server, but instead - # read them from a Redis instance that is updated by the proxy. + # Whether to use the LaunchDarkly relay proxy in daemon mode. In this mode, the client does not + # use polling or streaming to get feature flag updates from the server, but instead reads them + # from the {#feature_store feature store}, which is assumed to be a database that is populated by + # a LaunchDarkly relay proxy. For more information, see ["The relay proxy"](https://docs.launchdarkly.com/v2.0/docs/the-relay-proxy) + # and ["Using a persistent feature store"](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). + # + # All other properties related to streaming or polling are ignored if this option is set to true. + # + # @return [Boolean] # - # @return [Boolean] True if using the LaunchDarkly relay proxy in daemon mode def use_ldd? @use_ldd end - # TODO docs + # + # Whether the client should be initialized in offline mode. In offline mode, default values are + # returned for all flags and no remote network requests are made. + # @return [Boolean] + # def offline? @offline end @@ -139,20 +121,23 @@ def offline? # # The number of seconds between flushes of the event buffer. Decreasing the flush interval means # that the event buffer is less likely to reach capacity. + # @return [Float] # - # @return [Float] The configured number of seconds between flushes of the event buffer. attr_reader :flush_interval # # The number of seconds to wait before polling for feature flag updates. This option has no - # effect unless streaming is disabled + # effect unless streaming is disabled. + # @return [Float] + # attr_reader :poll_interval # # The configured logger for the LaunchDarkly client. The client library uses the log to - # print warning and error messages. + # print warning and error messages. If not specified, this defaults to the Rails logger + # in a Rails environment, or stdout otherwise. + # @return [Logger] # - # @return [Logger] The configured logger attr_reader :logger # @@ -161,114 +146,208 @@ def offline? # the buffer is flushed, events will be discarded. # Increasing the capacity means that events are less likely to be discarded, # at the cost of consuming more memory. + # @return [Integer] # - # @return [Integer] The configured capacity of the event buffer attr_reader :capacity # - # The store for the Faraday HTTP caching library. Stores should respond to - # 'read' and 'write' requests. + # A store for HTTP caching. This must support the semantics used by the + # [`faraday-http-cache`](https://github.com/plataformatec/faraday-http-cache) gem. Defaults + # to the Rails cache in a Rails environment, or a thread-safe in-memory store otherwise. 
+ # @return [Object] # - # @return [Object] The configured store for the Faraday HTTP caching library. attr_reader :cache_store # - # The read timeout for network connections in seconds. + # The read timeout for network connections in seconds. This does not apply to the streaming + # connection, which uses a longer timeout since the server does not send data constantly. + # @return [Float] # - # @return [Float] The read timeout in seconds. attr_reader :read_timeout # # The connect timeout for network connections in seconds. + # @return [Float] # - # @return [Float] The connect timeout in seconds. attr_reader :connect_timeout # - # A store for feature flag configuration rules. + # A store for feature flags and related data. The client uses it to store all data received + # from LaunchDarkly, and uses the last stored data when evaluating flags. Defaults to + # {InMemoryFeatureStore}; for other implementations, see {LaunchDarkly::Integrations}. + # + # For more information, see ["Using a persistent feature store"](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). + # + # @return [LaunchDarkly::Interfaces::FeatureStore] # attr_reader :feature_store - # The proxy configuration string + # + # The proxy configuration string. + # @return [String] # attr_reader :proxy + # + # True if all user attributes (other than the key) should be considered private. This means + # that the attribute values will not be sent to LaunchDarkly in analytics events and will not + # appear on the LaunchDarkly dashboard. + # @return [Boolean] + # @see #private_attribute_names + # attr_reader :all_attributes_private + # + # A list of user attribute names that should always be considered private. This means that the + # attribute values will not be sent to LaunchDarkly in analytics events and will not appear on + # the LaunchDarkly dashboard. + # + # You can also specify the same behavior for an individual flag evaluation by storing an array + # of attribute names in the `:privateAttributeNames` property (note camelcase name) of the + # user object. + # + # @return [Array] + # @see #all_attributes_private + # attr_reader :private_attribute_names # - # Whether to send events back to LaunchDarkly. + # Whether to send events back to LaunchDarkly. This differs from {#offline?} in that it affects + # only the sending of client-side events, not streaming or polling for events from the server. + # @return [Boolean] # attr_reader :send_events # - # The number of user keys that the event processor can remember at any one time, so that - # duplicate user details will not be sent in analytics events. + # The number of user keys that the event processor can remember at any one time. This reduces the + # amount of duplicate user details sent in analytics events. + # @return [Integer] + # @see #user_keys_flush_interval # attr_reader :user_keys_capacity # # The interval in seconds at which the event processor will reset its set of known user keys. + # @return [Float] + # @see #user_keys_capacity # attr_reader :user_keys_flush_interval # - # Whether to include full user details in every - # analytics event. By default, events will only include the user key, except for one "index" event - # that provides the full details for the user. + # Whether to include full user details in every analytics event. By default, events will only + # include the user key, except for one "index" event that provides the full details for the user. + # The only reason to change this is if you are using the Analytics Data Stream. 
+ # @return [Boolean] # attr_reader :inline_users_in_events + # + # An object that is responsible for receiving feature flag data from LaunchDarkly. By default, + # the client uses its standard polling or streaming implementation; this is customizable for + # testing purposes. + # @return [LaunchDarkly::Interfaces::UpdateProcessor] + # @deprecated The preferred way to set this is now with {#update_processor_factory}. + # attr_reader :update_processor + # + # Factory for an object that is responsible for receiving feature flag data from LaunchDarkly + # By default, the client uses its standard polling or streaming implementation; this is + # customizable for testing purposes. + # + # The factory is a lambda or Proc that takes two parameters: the SDK key and the {Config}. It + # must return an object that conforms to {LaunchDarkly::Interfaces::UpdateProcessor}. + # + # @return [lambda] + # @see FileDataSource + # attr_reader :update_processor_factory - + # # The default LaunchDarkly client configuration. This configuration sets # reasonable defaults for most users. - # # @return [Config] The default LaunchDarkly configuration. + # def self.default Config.new end + # + # The default value for {#capacity}. + # @return [Integer] 10000 + # def self.default_capacity 10000 end + # + # The default value for {#base_uri}. + # @return [String] "https://app.launchdarkly.com" + # def self.default_base_uri "https://app.launchdarkly.com" end + # + # The default value for {#stream_uri}. + # @return [String] "https://stream.launchdarkly.com" + # def self.default_stream_uri "https://stream.launchdarkly.com" end + # + # The default value for {#events_uri}. + # @return [String] "https://events.launchdarkly.com" + # def self.default_events_uri "https://events.launchdarkly.com" end + # + # The default value for {#cache_store}. + # @return [Object] the Rails cache if in Rails, or a simple in-memory implementation otherwise + # def self.default_cache_store defined?(Rails) && Rails.respond_to?(:cache) ? Rails.cache : ThreadSafeMemoryStore.new end + # + # The default value for {#flush_interval}. + # @return [Float] 10 + # def self.default_flush_interval 10 end + # + # The default value for {#read_timeout}. + # @return [Float] 10 + # def self.default_read_timeout 10 end + # + # The default value for {#connect_timeout}. + # @return [Float] 10 + # def self.default_connect_timeout 2 end + # + # The default value for {#proxy}. + # @return [String] nil + # def self.default_proxy nil end + # + # The default value for {#logger}. + # @return [::Logger] the Rails logger if in Rails, or a default [::Logger] at WARN level otherwise + # def self.default_logger if defined?(Rails) && Rails.respond_to?(:logger) Rails.logger @@ -279,34 +358,66 @@ def self.default_logger end end + # + # The default value for {#stream?}. + # @return [Boolean] true + # def self.default_stream true end + # + # The default value for {#use_ldd?}. + # @return [Boolean] false + # def self.default_use_ldd false end + # + # The default value for {#feature_store}. + # @return [LaunchDarkly::Interfaces::FeatureStore] an {InMemoryFeatureStore} + # def self.default_feature_store InMemoryFeatureStore.new end + # + # The default value for {#offline?}. + # @return [Boolean] false + # def self.default_offline false end + # + # The default value for {#poll_interval}. + # @return [Float] 30 + # def self.default_poll_interval 30 end + # + # The default value for {#send_events}. 
+ # @return [Boolean] true + # def self.default_send_events true end + # + # The default value for {#user_keys_capacity}. + # @return [Integer] 1000 + # def self.default_user_keys_capacity 1000 end + # + # The default value for {#user_keys_flush_interval}. + # @return [Float] 300 + # def self.default_user_keys_flush_interval 300 end diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index f73eb1ed..f873a6e3 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -2,7 +2,7 @@ require "semantic" module LaunchDarkly - # An object returned by `LDClient.variation_detail`, combining the result of a flag evaluation with + # An object returned by {LDClient#variation_detail}, combining the result of a flag evaluation with # an explanation of how it was calculated. class EvaluationDetail def initialize(value, variation_index, reason) @@ -11,19 +11,66 @@ def initialize(value, variation_index, reason) @reason = reason end - # @return [Object] The result of the flag evaluation. This will be either one of the flag's - # variations or the default value that was passed to the `variation` method. + # + # The result of the flag evaluation. This will be either one of the flag's variations, or the + # default value that was passed to {LDClient#variation_detail}. It is the same as the return + # value of {LDClient#variation}. + # + # @return [Object] + # attr_reader :value - # @return [int|nil] The index of the returned value within the flag's list of variations, e.g. - # 0 for the first variation - or `nil` if the default value was returned. + # + # The index of the returned value within the flag's list of variations. The first variation is + # 0, the second is 1, etc. This is `nil` if the default value was returned. + # + # @return [int|nil] + # attr_reader :variation_index - # @return [Hash] An object describing the main factor that influenced the flag evaluation value. + # + # An object describing the main factor that influenced the flag evaluation value. + # + # This object is currently represented as a Hash, which may have the following keys: + # + # `:kind`: The general category of reason. Possible values: + # + # * `'OFF'`: the flag was off and therefore returned its configured off value + # * `'FALLTHROUGH'`: the flag was on but the user did not match any targets or rules + # * `'TARGET_MATCH'`: the user key was specifically targeted for this flag + # * `'RULE_MATCH'`: the user matched one of the flag's rules + # * `'PREREQUISITE_FAILED`': the flag was considered off because it had at least one + # prerequisite flag that either was off or did not return the desired variation + # * `'ERROR'`: the flag could not be evaluated, so the default value was returned + # + # `:ruleIndex`: If the kind was `RULE_MATCH`, this is the positional index of the + # matched rule (0 for the first rule). + # + # `:ruleId`: If the kind was `RULE_MATCH`, this is the rule's unique identifier. + # + # `:prerequisiteKey`: If the kind was `PREREQUISITE_FAILED`, this is the flag key of + # the prerequisite flag that failed. + # + # `:errorKind`: If the kind was `ERROR`, this indicates the type of error: + # + # * `'CLIENT_NOT_READY'`: the caller tried to evaluate a flag before the client had + # successfully initialized + # * `'FLAG_NOT_FOUND'`: the caller provided a flag key that did not match any known flag + # * `'MALFORMED_FLAG'`: there was an internal inconsistency in the flag data, e.g. 
a + # rule specified a nonexistent variation + # * `'USER_NOT_SPECIFIED'`: the user object or user key was not provied + # * `'EXCEPTION'`: an unexpected exception stopped flag evaluation + # + # @return [Hash] + # attr_reader :reason - # @return [boolean] True if the flag evaluated to the default value rather than to one of its - # variations. + # + # Tests whether the flag evaluation returned a default value. This is the same as checking + # whether {#variation_index} is nil. + # + # @return [Boolean] + # def default_value? variation_index.nil? end @@ -33,6 +80,7 @@ def ==(other) end end + # @private module Evaluation BUILTINS = [:key, :ip, :country, :email, :firstName, :lastName, :avatar, :name, :anonymous] diff --git a/lib/ldclient-rb/event_summarizer.rb b/lib/ldclient-rb/event_summarizer.rb index 1c55b524..c48a400f 100644 --- a/lib/ldclient-rb/event_summarizer.rb +++ b/lib/ldclient-rb/event_summarizer.rb @@ -1,11 +1,14 @@ module LaunchDarkly + # @private EventSummary = Struct.new(:start_date, :end_date, :counters) # Manages the state of summarizable information for the EventProcessor, including the # event counters and user deduplication. Note that the methods of this class are # deliberately not thread-safe; the EventProcessor is responsible for enforcing # synchronization across both the summarizer and the event queue. + # + # @private class EventSummarizer def initialize clear diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index e19d6b02..cbae5ac5 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -9,6 +9,10 @@ module LaunchDarkly MAX_FLUSH_WORKERS = 5 CURRENT_SCHEMA_VERSION = 3 + private_constant :MAX_FLUSH_WORKERS + private_constant :CURRENT_SCHEMA_VERSION + + # @private class NullEventProcessor def add_event(event) end @@ -20,6 +24,7 @@ def stop end end + # @private class EventMessage def initialize(event) @event = event @@ -27,12 +32,15 @@ def initialize(event) attr_reader :event end + # @private class FlushMessage end + # @private class FlushUsersMessage end + # @private class SynchronousMessage def initialize @reply = Concurrent::Semaphore.new(0) @@ -47,12 +55,15 @@ def wait_for_completion end end + # @private class TestSyncMessage < SynchronousMessage end + # @private class StopMessage < SynchronousMessage end + # @private class EventProcessor def initialize(sdk_key, config, client = nil) @queue = Queue.new @@ -99,6 +110,7 @@ def wait_until_inactive end end + # @private class EventDispatcher def initialize(queue, sdk_key, config, client) @sdk_key = sdk_key @@ -252,8 +264,10 @@ def handle_response(res) end end + # @private FlushPayload = Struct.new(:events, :summary) + # @private class EventBuffer def initialize(capacity, logger) @capacity = capacity @@ -290,6 +304,7 @@ def clear end end + # @private class EventPayloadSendTask def run(sdk_key, config, client, payload, formatter) events_out = formatter.make_output_events(payload.events, payload.summary) @@ -327,6 +342,7 @@ def run(sdk_key, config, client, payload, formatter) end end + # @private class EventOutputFormatter def initialize(config) @inline_users = config.inline_users_in_events diff --git a/lib/ldclient-rb/expiring_cache.rb b/lib/ldclient-rb/expiring_cache.rb index 6d8c48f8..fa6051c9 100644 --- a/lib/ldclient-rb/expiring_cache.rb +++ b/lib/ldclient-rb/expiring_cache.rb @@ -6,6 +6,7 @@ module LaunchDarkly # * made thread-safe # * removed many unused methods # * reading a key does not reset its expiration time, only writing + # @private class ExpiringCache def 
initialize(max_size, ttl) @max_size = max_size diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index da80f26a..120276fc 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -7,12 +7,15 @@ module LaunchDarkly # To avoid pulling in 'listen' and its transitive dependencies for people who aren't using the # file data source or who don't need auto-updating, we only enable auto-update if the 'listen' # gem has been provided by the host app. + # @private @@have_listen = false begin require 'listen' @@have_listen = true rescue LoadError end + + # @private def self.have_listen? @@have_listen end @@ -45,7 +48,7 @@ def self.have_listen? # to request existing flags directly from the LaunchDarkly server in JSON format, and use this # output as the starting point for your file. In Linux you would do this: # - # curl -H "Authorization: {your sdk key}" https://app.launchdarkly.com/sdk/latest-all + # curl -H "Authorization: YOUR_SDK_KEY" https://app.launchdarkly.com/sdk/latest-all # # The output will look something like this (but with many more properties): # @@ -92,6 +95,8 @@ def self.have_listen? # duplicate key-- it will not load flags from any of the files. # class FileDataSource + include LaunchDarkly::Interfaces::UpdateProcessor + # # Returns a factory for the file data source component. # @@ -116,6 +121,7 @@ def self.factory(options={}) end end + # @private class FileDataSourceImpl def initialize(feature_store, logger, options={}) @feature_store = feature_store diff --git a/lib/ldclient-rb/flags_state.rb b/lib/ldclient-rb/flags_state.rb index b761149c..4efe1404 100644 --- a/lib/ldclient-rb/flags_state.rb +++ b/lib/ldclient-rb/flags_state.rb @@ -3,8 +3,8 @@ module LaunchDarkly # # A snapshot of the state of all feature flags with regard to a specific user, generated by - # calling the client's all_flags_state method. Serializing this object to JSON using - # JSON.generate (or the to_json method) will produce the appropriate data structure for + # calling the {LDClient#all_flags_state}. Serializing this object to JSON using + # `JSON.generate` (or the `to_json` method) will produce the appropriate data structure for # bootstrapping the LaunchDarkly JavaScript client. # class FeatureFlagsState @@ -15,6 +15,7 @@ def initialize(valid) end # Used internally to build the state map. + # @private def add_flag(flag, value, variation, reason = nil, details_only_if_tracked = false) key = flag[:key] @flag_values[key] = value diff --git a/lib/ldclient-rb/impl.rb b/lib/ldclient-rb/impl.rb index 85079baf..3df0d7e3 100644 --- a/lib/ldclient-rb/impl.rb +++ b/lib/ldclient-rb/impl.rb @@ -4,6 +4,8 @@ module LaunchDarkly # Low-level implementation classes. Everything in this module should be considered non-public # and subject to change with any release. # + # @since 5.5.0 + # module Impl # code is in ldclient-rb/impl/ end diff --git a/lib/ldclient-rb/integrations/util/store_wrapper.rb b/lib/ldclient-rb/integrations/util/store_wrapper.rb index 58ecb2c4..46a648c1 100644 --- a/lib/ldclient-rb/integrations/util/store_wrapper.rb +++ b/lib/ldclient-rb/integrations/util/store_wrapper.rb @@ -153,6 +153,7 @@ module FeatureStoreCore # # @param all_data [Hash] a hash where each key is one of the data kind objects, and each # value is in turn a hash of string keys to entities + # @return [void] # def init_internal(all_data) end @@ -214,6 +215,8 @@ def initialized_internal? 
# # Performs any necessary cleanup to shut down the store when the client is being shut down. # + # @return [void] + # def stop end end diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb index 510e1636..c9c38cfe 100644 --- a/lib/ldclient-rb/interfaces.rb +++ b/lib/ldclient-rb/interfaces.rb @@ -35,6 +35,7 @@ module FeatureStore # # @param all_data [Hash] a hash where each key is one of the data kind objects, and each # value is in turn a hash of string keys to entities + # @return [void] # def init(all_data) end @@ -67,6 +68,7 @@ def all(kind) # # @param kind [Object] the kind of entity to add or update # @param item [Hash] the entity to add or update + # @return [void] # def upsert(kind, item) end @@ -79,6 +81,7 @@ def upsert(kind, item) # @param kind [Object] the kind of entity to delete # @param key [String] the unique key of the entity # @param version [Integer] the entity must have a lower version than this to be deleted + # @return [void] # def delete(kind, key, version) end @@ -98,6 +101,45 @@ def initialized? # # Performs any necessary cleanup to shut down the store when the client is being shut down. # + # @return [void] + # + def stop + end + end + + # + # Mixin that defines the required methods of an update processor implementation. This is + # the component that delivers feature flag data from LaunchDarkly to the LDClient by putting + # the data in the {FeatureStore}. It is expected to run concurrently on its own thread. + # + # The client has its own standard implementation, which uses either a streaming connection or + # polling depending on your configuration. Normally you will not need to use another one + # except for testing purposes. {FileDataSource} provides one such test fixture. + # + module UpdateProcessor + # + # Checks whether the processor has finished initializing. Initialization is considered done + # once it has received one complete data set from LaunchDarkly. + # + # @return [Boolean] true if initialization is complete + # + def initialized? + end + + # + # Puts the processor into an active state. Normally this means it will make its first + # connection attempt to LaunchDarkly. If `start` has already been called, calling it again + # should simply return the same value as the first call. + # + # @return [Concurrent::Event] an Event which will be set once initialization is complete + # + def start + end + + # + # Puts the processor into an inactive state and releases all of its resources. + # This state should be considered permanent (`start` does not have to work after `stop`). + # def stop end end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index f8a75780..ffd82084 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -10,7 +10,6 @@ module LaunchDarkly # A client for LaunchDarkly. Client instances are thread-safe. Users # should create a single client instance for the lifetime of the application. # - # class LDClient include Evaluation # @@ -18,7 +17,6 @@ class LDClient # configuration parameter can also supplied to specify advanced options, # but for most use cases, the default configuration is appropriate. # - # # @param sdk_key [String] the SDK key for your LaunchDarkly account # @param config [Config] an optional client configuration object # @@ -57,15 +55,41 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) end end + # + # Tells the client that all pending analytics events should be delivered as soon as possible. 
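+  # A minimal usage sketch (details are below; `client`, `user`, and the flag key here are placeholders):
+  #
+  #     client.variation("my-flag-key", user, false)
+  #     client.flush  # deliver the queued evaluation event now rather than on the next interval
+  #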
+ # + # When the LaunchDarkly client generates analytics events (from {#variation}, {#variation_detail}, + # {#identify}, or {#track}), they are queued on a worker thread. The event thread normally + # sends all queued events to LaunchDarkly at regular intervals, controlled by the + # {Config#flush_interval} option. Calling `flush` triggers a send without waiting for the + # next interval. + # + # Flushing is asynchronous, so this method will return before it is complete. However, if you + # call {#close}, events are guaranteed to be sent before that method returns. + # def flush @event_processor.flush end - def toggle?(key, user, default = False) + # + # @param key [String] the feature flag key + # @param user [Hash] the user properties + # @param default [Boolean] (false) the value to use if the flag cannot be evaluated + # @return [Boolean] the flag value + # @deprecated Use {#variation} instead. + # + def toggle?(key, user, default = false) @config.logger.warn { "[LDClient] toggle? is deprecated. Use variation instead" } variation(key, user, default) end + # + # Creates a hash string that can be used by the JavaScript SDK to identify a user. + # For more information, see ["Secure mode"](https://docs.launchdarkly.com/docs/js-sdk-reference#section-secure-mode). + # + # @param user [Hash] the user properties + # @return [String] a hash string + # def secure_mode_hash(user) OpenSSL::HMAC.hexdigest("sha256", @sdk_key, user[:key].to_s) end @@ -78,13 +102,13 @@ def initialized? # # Determines the variation of a feature flag to present to a user. At a minimum, - # the user hash should contain a +:key+ . + # the user hash should contain a `:key`. # # @example Basic user hash # {key: "user@example.com"} # - # For authenticated users, the +:key+ should be the unique identifier for - # your user. For anonymous users, the +:key+ should be a session identifier + # For authenticated users, the `:key` should be the unique identifier for + # your user. For anonymous users, the `:key` should be a session identifier # or cookie. In either case, the only requirement is that the key # is unique to a user. # @@ -93,7 +117,7 @@ def initialized? # @example More complete user hash # {key: "user@example.com", ip: "127.0.0.1", country: "US"} # - # The user hash can contain arbitrary custom attributes stored in a +:custom+ sub-hash: + # The user hash can contain arbitrary custom attributes stored in a `:custom` sub-hash: # # @example A user hash with custom attributes # {key: "user@example.com", custom: {customer_rank: 1000, groups: ["google", "microsoft"]}} @@ -113,66 +137,61 @@ def variation(key, user, default) end # - # Determines the variation of a feature flag for a user, like `variation`, but also + # Determines the variation of a feature flag for a user, like {#variation}, but also # provides additional information about how this value was calculated. # - # The return value of `variation_detail` is an `EvaluationDetail` object, which has - # three properties: - # - # `value`: the value that was calculated for this user (same as the return value - # of `variation`) - # - # `variation_index`: the positional index of this value in the flag, e.g. 0 for the - # first variation - or `nil` if the default value was returned - # - # `reason`: a hash describing the main reason why this value was selected. 
Its `:kind` - # property will be one of the following: - # - # * `'OFF'`: the flag was off and therefore returned its configured off value - # * `'FALLTHROUGH'`: the flag was on but the user did not match any targets or rules - # * `'TARGET_MATCH'`: the user key was specifically targeted for this flag - # * `'RULE_MATCH'`: the user matched one of the flag's rules; the `:ruleIndex` and - # `:ruleId` properties indicate the positional index and unique identifier of the rule - # * `'PREREQUISITE_FAILED`': the flag was considered off because it had at least one - # prerequisite flag that either was off or did not return the desired variation; the - # `:prerequisiteKey` property indicates the key of the prerequisite that failed - # * `'ERROR'`: the flag could not be evaluated, e.g. because it does not exist or due - # to an unexpected error, and therefore returned the default value; the `:errorKind` - # property describes the nature of the error, such as `'FLAG_NOT_FOUND'` + # The return value of `variation_detail` is an {EvaluationDetail} object, which has + # three properties: the result value, the positional index of this value in the flag's + # list of variations, and an object describing the main reason why this value was + # selected. See {EvaluationDetail} for more on these properties. # - # The `reason` will also be included in analytics events, if you are capturing - # detailed event data for this flag. + # Calling `variation_detail` instead of `variation` also causes the "reason" data to + # be included in analytics events, if you are capturing detailed event data for this flag. # # @param key [String] the unique feature key for the feature flag, as shown # on the LaunchDarkly dashboard # @param user [Hash] a hash containing parameters for the end user requesting the flag # @param default the default value of the flag # - # @return an `EvaluationDetail` object describing the result + # @return [EvaluationDetail] an object describing the result # def variation_detail(key, user, default) evaluate_internal(key, user, default, true) end # - # Registers the user + # Registers the user. This method simply creates an analytics event containing the user + # properties, so that LaunchDarkly will know about that user if it does not already. # - # @param [Hash] The user to register + # Calling {#variation} or {#variation_detail} also sends the user information to + # LaunchDarkly (if events are enabled), so you only need to use {#identify} if you + # want to identify the user without evaluating a flag. # + # Note that event delivery is asynchronous, so the event may not actually be sent + # until later; see {#flush}. + # + # @param user [Hash] The user to register; this can have all the same user properties + # described in {#variation} # @return [void] + # def identify(user) sanitize_user(user) @event_processor.add_event(kind: "identify", key: user[:key], user: user) end # - # Tracks that a user performed an event + # Tracks that a user performed an event. This method creates a "custom" analytics event + # containing the specified event name (key), user properties, and optional data. + # + # Note that event delivery is asynchronous, so the event may not actually be sent + # until later; see {#flush}. # # @param event_name [String] The name of the event - # @param user [Hash] The user that performed the event. 
This should be the same user hash used in calls to {#toggle?} + # @param user [Hash] The user to register; this can have all the same user properties + # described in {#variation} # @param data [Hash] A hash containing any additional data associated with the event - # # @return [void] + # def track(event_name, user, data) sanitize_user(user) @event_processor.add_event(kind: "custom", key: event_name, user: user, data: data) @@ -181,7 +200,7 @@ def track(event_name, user, data) # # Returns all feature flag values for the given user. This method is deprecated - please use # {#all_flags_state} instead. Current versions of the client-side SDK will not generate analytics - # events correctly if you pass the result of all_flags. + # events correctly if you pass the result of `all_flags`. # # @param user [Hash] The end user requesting the feature flags # @return [Hash] a hash of feature flag keys to values @@ -191,21 +210,21 @@ def all_flags(user) end # - # Returns a FeatureFlagsState object that encapsulates the state of all feature flags for a given user, + # Returns a {FeatureFlagsState} object that encapsulates the state of all feature flags for a given user, # including the flag values and also metadata that can be used on the front end. This method does not # send analytics events back to LaunchDarkly. # # @param user [Hash] The end user requesting the feature flags - # @param options={} [Hash] Optional parameters to control how the state is generated + # @param options [Hash] Optional parameters to control how the state is generated # @option options [Boolean] :client_side_only (false) True if only flags marked for use with the # client-side SDK should be included in the state. By default, all flags are included. # @option options [Boolean] :with_reasons (false) True if evaluation reasons should be included - # in the state (see `variation_detail`). By default, they are not included. + # in the state (see {#variation_detail}). By default, they are not included. # @option options [Boolean] :details_only_for_tracked_flags (false) True if any flag metadata that is - # normally only used for event generation - such as flag versions and evaluation reasons - should be - # omitted for any flag that does not have event tracking or debugging turned on. This reduces the size - # of the JSON data if you are passing the flag state to the front end. - # @return [FeatureFlagsState] a FeatureFlagsState object which can be serialized to JSON + # normally only used for event generation - such as flag versions and evaluation reasons - should be + # omitted for any flag that does not have event tracking or debugging turned on. This reduces the size + # of the JSON data if you are passing the flag state to the front end. + # @return [FeatureFlagsState] a {FeatureFlagsState} object which can be serialized to JSON # def all_flags_state(user, options={}) return FeatureFlagsState.new(false) if @config.offline? @@ -246,7 +265,7 @@ def all_flags_state(user, options={}) end # - # Releases all network connections and other resources held by the client, making it no longer usable + # Releases all network connections and other resources held by the client, making it no longer usable. # # @return [void] def close @@ -351,6 +370,7 @@ def make_feature_event(flag, user, detail, default, with_reasons) # # Used internally when the client is offline. 
+ # @private # class NullUpdateProcessor def start diff --git a/lib/ldclient-rb/memoized_value.rb b/lib/ldclient-rb/memoized_value.rb index 3ba766a6..ddddb7e0 100644 --- a/lib/ldclient-rb/memoized_value.rb +++ b/lib/ldclient-rb/memoized_value.rb @@ -2,6 +2,8 @@ module LaunchDarkly # Simple implementation of a thread-safe memoized value whose generator function will never be # run more than once, and whose value can be overridden by explicit assignment. + # Note that we no longer use this class and it will be removed in a future version. + # @private class MemoizedValue def initialize(&generator) @generator = generator diff --git a/lib/ldclient-rb/newrelic.rb b/lib/ldclient-rb/newrelic.rb index ed6eb4e4..5c9b7d48 100644 --- a/lib/ldclient-rb/newrelic.rb +++ b/lib/ldclient-rb/newrelic.rb @@ -1,4 +1,5 @@ module LaunchDarkly + # @private class LDNewRelic begin require "newrelic_rpm" diff --git a/lib/ldclient-rb/non_blocking_thread_pool.rb b/lib/ldclient-rb/non_blocking_thread_pool.rb index 81b7ea14..28ec42a9 100644 --- a/lib/ldclient-rb/non_blocking_thread_pool.rb +++ b/lib/ldclient-rb/non_blocking_thread_pool.rb @@ -3,10 +3,10 @@ require "concurrent/executors" require "thread" -# Simple wrapper for a FixedThreadPool that rejects new jobs if all the threads are busy, rather -# than blocking. Also provides a way to wait for all jobs to finish without shutting down. - module LaunchDarkly + # Simple wrapper for a FixedThreadPool that rejects new jobs if all the threads are busy, rather + # than blocking. Also provides a way to wait for all jobs to finish without shutting down. + # @private class NonBlockingThreadPool def initialize(capacity) @capacity = capacity diff --git a/lib/ldclient-rb/polling.rb b/lib/ldclient-rb/polling.rb index 4ecd93f8..4c6769f3 100644 --- a/lib/ldclient-rb/polling.rb +++ b/lib/ldclient-rb/polling.rb @@ -2,6 +2,7 @@ require "thread" module LaunchDarkly + # @private class PollingProcessor def initialize(config, requestor) @config = config diff --git a/lib/ldclient-rb/requestor.rb b/lib/ldclient-rb/requestor.rb index 25cce121..3e244fbe 100644 --- a/lib/ldclient-rb/requestor.rb +++ b/lib/ldclient-rb/requestor.rb @@ -3,7 +3,7 @@ require "faraday/http_cache" module LaunchDarkly - + # @private class UnexpectedResponseError < StandardError def initialize(status) @status = status @@ -14,6 +14,7 @@ def status end end + # @private class Requestor def initialize(sdk_key, config) @sdk_key = sdk_key diff --git a/lib/ldclient-rb/simple_lru_cache.rb b/lib/ldclient-rb/simple_lru_cache.rb index 64b1a709..4eda4e27 100644 --- a/lib/ldclient-rb/simple_lru_cache.rb +++ b/lib/ldclient-rb/simple_lru_cache.rb @@ -2,6 +2,7 @@ module LaunchDarkly # A non-thread-safe implementation of a LRU cache set with only add and reset methods. 
# Based on https://github.com/SamSaffron/lru_redux/blob/master/lib/lru_redux/cache.rb + # @private class SimpleLRUCacheSet def initialize(capacity) @values = {} diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index 2151e945..660d7063 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -3,18 +3,26 @@ require "sse_client" module LaunchDarkly + # @private PUT = :put + # @private PATCH = :patch + # @private DELETE = :delete + # @private INDIRECT_PUT = :'indirect/put' + # @private INDIRECT_PATCH = :'indirect/patch' + # @private READ_TIMEOUT_SECONDS = 300 # 5 minutes; the stream should send a ping every 3 minutes + # @private KEY_PATHS = { FEATURES => "/flags/", SEGMENTS => "/segments/" } + # @private class StreamProcessor def initialize(sdk_key, config, requestor) @sdk_key = sdk_key diff --git a/lib/ldclient-rb/user_filter.rb b/lib/ldclient-rb/user_filter.rb index 449d8d2e..8cbf67ca 100644 --- a/lib/ldclient-rb/user_filter.rb +++ b/lib/ldclient-rb/user_filter.rb @@ -2,6 +2,7 @@ require "set" module LaunchDarkly + # @private class UserFilter def initialize(config) @all_attributes_private = config.all_attributes_private diff --git a/lib/ldclient-rb/util.rb b/lib/ldclient-rb/util.rb index 707ba3ce..e303e18a 100644 --- a/lib/ldclient-rb/util.rb +++ b/lib/ldclient-rb/util.rb @@ -1,5 +1,6 @@ module LaunchDarkly + # @private module Util def self.log_exception(logger, message, exc) logger.error { "[LDClient] #{message}: #{exc.inspect}" } diff --git a/lib/sse_client/sse_client.rb b/lib/sse_client/sse_client.rb index 9f285360..5b7e0fd9 100644 --- a/lib/sse_client/sse_client.rb +++ b/lib/sse_client/sse_client.rb @@ -3,6 +3,13 @@ require "thread" require "uri" +# +# A lightweight Server-Sent Events implementation based on the `socketry` gem. +# +# This module will be moved to a separate gem in the future. +# +# @private +# module SSE # # A lightweight Server-Sent Events implementation, relying on two gems: socketry for sockets with diff --git a/scripts/gendocs.sh b/scripts/gendocs.sh new file mode 100755 index 00000000..6280355e --- /dev/null +++ b/scripts/gendocs.sh @@ -0,0 +1,9 @@ +#!/bin/bash + +gem install --conservative yard +gem install --conservative redcarpet # provides Markdown formatting + +# yard doesn't seem to do recursive directories, even though Ruby's Dir.glob supposedly recurses for "**" +PATHS="lib/*.rb lib/**/*.rb lib/**/**/*.rb lib/**/**/**/*.rb" + +yard doc --no-private --markup markdown --markup-provider redcarpet --embed-mixins $PATHS - README.md From 59759545c5e227f810655598f16e825b4903315e Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 15 Dec 2018 15:04:00 -0800 Subject: [PATCH 061/182] comment fixes --- lib/ldclient-rb/file_data_source.rb | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 120276fc..adc32ab6 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -25,8 +25,8 @@ def self.have_listen? # used in a test environment, to operate using a predetermined feature flag state without an # actual LaunchDarkly connection. # - # To use this component, call `FileDataSource.factory`, and store its return value in the - # `update_processor_factory` property of your LaunchDarkly client configuration. 
In the options + # To use this component, call {FileDataSource#factory}, and store its return value in the + # {Config#update_processor_factory} property of your LaunchDarkly client configuration. In the options # to `factory`, set `paths` to the file path(s) of your data file(s): # # factory = FileDataSource.factory(paths: [ myFilePath ]) @@ -34,21 +34,23 @@ def self.have_listen? # # This will cause the client not to connect to LaunchDarkly to get feature flags. The # client may still make network connections to send analytics events, unless you have disabled - # this with Config.send_events or Config.offline. + # this with {Config#send_events} or {Config#offline?}. # # Flag data files can be either JSON or YAML. They contain an object with three possible # properties: # - # - "flags": Feature flag definitions. - # - "flagValues": Simplified feature flags that contain only a value. - # - "segments": User segment definitions. + # - `flags`: Feature flag definitions. + # - `flagValues`: Simplified feature flags that contain only a value. + # - `segments`: User segment definitions. # - # The format of the data in "flags" and "segments" is defined by the LaunchDarkly application + # The format of the data in `flags` and `segments` is defined by the LaunchDarkly application # and is subject to change. Rather than trying to construct these objects yourself, it is simpler # to request existing flags directly from the LaunchDarkly server in JSON format, and use this # output as the starting point for your file. In Linux you would do this: # - # curl -H "Authorization: YOUR_SDK_KEY" https://app.launchdarkly.com/sdk/latest-all + # ``` + # curl -H "Authorization: YOUR_SDK_KEY" https://app.launchdarkly.com/sdk/latest-all + # ``` # # The output will look something like this (but with many more properties): # @@ -95,8 +97,6 @@ def self.have_listen? # duplicate key-- it will not load flags from any of the files. # class FileDataSource - include LaunchDarkly::Interfaces::UpdateProcessor - # # Returns a factory for the file data source component. # @@ -113,6 +113,7 @@ class FileDataSource # @option options [Float] :poll_interval The minimum interval, in seconds, between checks for # file modifications - used only if auto_update is true, and if the native file-watching # mechanism from 'listen' is not being used. The default value is 1 second. + # @return an object that can be stored in {Config#update_processor_factory} # def self.factory(options={}) return Proc.new do |sdk_key, config| From 414af9957bdf1897c399d8131bcfe04d027b0c89 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 15 Dec 2018 15:08:43 -0800 Subject: [PATCH 062/182] change name of "update processor" to "data source" --- lib/ldclient-rb/config.rb | 28 ++++++++++++++-------------- lib/ldclient-rb/file_data_source.rb | 12 +++++------- lib/ldclient-rb/interfaces.rb | 12 ++++++------ lib/ldclient-rb/ldclient.rb | 18 +++++++++--------- spec/file_data_source_spec.rb | 4 ++-- spec/ldclient_spec.rb | 8 ++++---- 6 files changed, 40 insertions(+), 42 deletions(-) diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index e16e998a..64ad7378 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -34,8 +34,9 @@ class Config # @option opts [Integer] :user_keys_capacity (1000) See {#user_keys_capacity}. # @option opts [Float] :user_keys_flush_interval (300) See {#user_keys_flush_interval}. # @option opts [Boolean] :inline_users_in_events (false) See {#inline_users_in_events}. 
- # @option opts [Object] :update_processor See {#update_processor}. - # @option opts [Object] :update_processor_factory See {#update_processor_factory}. + # @option opts [Object] :data_source See {#data_source}. + # @option opts [Object] :update_processor Obsolete synonym for `data_source`. + # @option opts [Object] :update_processor_factory Obsolete synonym for `data_source`. # def initialize(opts = {}) @base_uri = (opts[:base_uri] || Config.default_base_uri).chomp("/") @@ -59,6 +60,7 @@ def initialize(opts = {}) @user_keys_capacity = opts[:user_keys_capacity] || Config.default_user_keys_capacity @user_keys_flush_interval = opts[:user_keys_flush_interval] || Config.default_user_keys_flush_interval @inline_users_in_events = opts[:inline_users_in_events] || false + @data_source = opts[:data_source] || opts[:update_processor] || opts[:update_processor_factory] @update_processor = opts[:update_processor] @update_processor_factory = opts[:update_processor_factory] end @@ -245,22 +247,20 @@ def offline? # An object that is responsible for receiving feature flag data from LaunchDarkly. By default, # the client uses its standard polling or streaming implementation; this is customizable for # testing purposes. - # @return [LaunchDarkly::Interfaces::UpdateProcessor] - # @deprecated The preferred way to set this is now with {#update_processor_factory}. # - attr_reader :update_processor - - # - # Factory for an object that is responsible for receiving feature flag data from LaunchDarkly - # By default, the client uses its standard polling or streaming implementation; this is - # customizable for testing purposes. - # - # The factory is a lambda or Proc that takes two parameters: the SDK key and the {Config}. It - # must return an object that conforms to {LaunchDarkly::Interfaces::UpdateProcessor}. + # This may be set to either an object that conforms to {LaunchDarkly::Interfaces::DataSource}, + # or a lambda (or Proc) that takes two parameters-- SDK key and {Config}-- and returns such an + # object. # - # @return [lambda] + # @return [LaunchDarkly::Interfaces::DataSource|lambda] # @see FileDataSource # + attr_reader :data_source + + # @deprecated This is replaced by {#data_source}. + attr_reader :update_processor + + # @deprecated This is replaced by {#data_source}. attr_reader :update_processor_factory # diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index adc32ab6..7606c1d3 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -26,11 +26,11 @@ def self.have_listen? # actual LaunchDarkly connection. # # To use this component, call {FileDataSource#factory}, and store its return value in the - # {Config#update_processor_factory} property of your LaunchDarkly client configuration. In the options + # {Config#data_source} property of your LaunchDarkly client configuration. In the options # to `factory`, set `paths` to the file path(s) of your data file(s): # - # factory = FileDataSource.factory(paths: [ myFilePath ]) - # config = LaunchDarkly::Config.new(update_processor_factory: factory) + # file_source = FileDataSource.factory(paths: [ myFilePath ]) + # config = LaunchDarkly::Config.new(data_source: file_source) # # This will cause the client not to connect to LaunchDarkly to get feature flags. 
The # client may still make network connections to send analytics events, unless you have disabled @@ -113,12 +113,10 @@ class FileDataSource # @option options [Float] :poll_interval The minimum interval, in seconds, between checks for # file modifications - used only if auto_update is true, and if the native file-watching # mechanism from 'listen' is not being used. The default value is 1 second. - # @return an object that can be stored in {Config#update_processor_factory} + # @return an object that can be stored in {Config#data_source} # def self.factory(options={}) - return Proc.new do |sdk_key, config| - FileDataSourceImpl.new(config.feature_store, config.logger, options) - end + return lambda { |sdk_key, config| FileDataSourceImpl.new(config.feature_store, config.logger, options) } end end diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb index c9c38cfe..912472b5 100644 --- a/lib/ldclient-rb/interfaces.rb +++ b/lib/ldclient-rb/interfaces.rb @@ -108,17 +108,17 @@ def stop end # - # Mixin that defines the required methods of an update processor implementation. This is - # the component that delivers feature flag data from LaunchDarkly to the LDClient by putting + # Mixin that defines the required methods of a data source implementation. This is the + # component that delivers feature flag data from LaunchDarkly to the LDClient by putting # the data in the {FeatureStore}. It is expected to run concurrently on its own thread. # # The client has its own standard implementation, which uses either a streaming connection or # polling depending on your configuration. Normally you will not need to use another one # except for testing purposes. {FileDataSource} provides one such test fixture. # - module UpdateProcessor + module DataSource # - # Checks whether the processor has finished initializing. Initialization is considered done + # Checks whether the data source has finished initializing. Initialization is considered done # once it has received one complete data set from LaunchDarkly. # # @return [Boolean] true if initialization is complete @@ -127,7 +127,7 @@ def initialized? end # - # Puts the processor into an active state. Normally this means it will make its first + # Puts the data source into an active state. Normally this means it will make its first # connection attempt to LaunchDarkly. If `start` has already been called, calling it again # should simply return the same value as the first call. # @@ -137,7 +137,7 @@ def start end # - # Puts the processor into an inactive state and releases all of its resources. + # Puts the data source into an inactive state and releases all of its resources. # This state should be considered permanent (`start` does not have to work after `stop`). # def stop diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index ffd82084..868c65bd 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -37,19 +37,19 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) return # requestor and update processor are not used in this mode end - if @config.update_processor - @update_processor = @config.update_processor + data_source_or_factory = @config.data_source || self.method(:create_default_data_source) + if data_source_or_factory.respond_to? 
:call + @data_source = data_source_or_factory.call(sdk_key, config) else - factory = @config.update_processor_factory || self.method(:create_default_update_processor) - @update_processor = factory.call(sdk_key, config) + @data_source = data_source_or_factory end - ready = @update_processor.start + ready = @data_source.start if wait_for_sec > 0 ok = ready.wait(wait_for_sec) if !ok @config.logger.error { "[LDClient] Timeout encountered waiting for LaunchDarkly client initialization" } - elsif !@update_processor.initialized? + elsif !@data_source.initialized? @config.logger.error { "[LDClient] LaunchDarkly client initialization failed" } end end @@ -97,7 +97,7 @@ def secure_mode_hash(user) # Returns whether the client has been initialized and is ready to serve feature flag requests # @return [Boolean] true if the client has been initialized def initialized? - @config.offline? || @config.use_ldd? || @update_processor.initialized? + @config.offline? || @config.use_ldd? || @data_source.initialized? end # @@ -270,14 +270,14 @@ def all_flags_state(user, options={}) # @return [void] def close @config.logger.info { "[LDClient] Closing LaunchDarkly client..." } - @update_processor.stop + @data_source.stop @event_processor.stop @store.stop end private - def create_default_update_processor(sdk_key, config) + def create_default_data_source(sdk_key, config) if config.offline? return NullUpdateProcessor.new end diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index 60107e26..28a0c06f 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -219,7 +219,7 @@ def test_auto_reload(options) it "evaluates simplified flag with client as expected" do file = make_temp_file(all_properties_json) factory = LaunchDarkly::FileDataSource.factory({ paths: file.path }) - config = LaunchDarkly::Config.new(send_events: false, update_processor_factory: factory) + config = LaunchDarkly::Config.new(send_events: false, data_source: factory) client = LaunchDarkly::LDClient.new('sdkKey', config) begin @@ -233,7 +233,7 @@ def test_auto_reload(options) it "evaluates full flag with client as expected" do file = make_temp_file(all_properties_json) factory = LaunchDarkly::FileDataSource.factory({ paths: file.path }) - config = LaunchDarkly::Config.new(send_events: false, update_processor_factory: factory) + config = LaunchDarkly::Config.new(send_events: false, data_source: factory) client = LaunchDarkly::LDClient.new('sdkKey', config) begin diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 262f53f9..b3a9592c 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -7,8 +7,8 @@ let(:offline_client) do subject.new("secret", offline_config) end - let(:update_processor) { LaunchDarkly::NullUpdateProcessor.new } - let(:config) { LaunchDarkly::Config.new({send_events: false, update_processor: update_processor}) } + let(:null_data) { LaunchDarkly::NullUpdateProcessor.new } + let(:config) { LaunchDarkly::Config.new({send_events: false, data_source: null_data}) } let(:client) do subject.new("secret", config) end @@ -357,7 +357,7 @@ def event_processor end describe 'with send_events: false' do - let(:config) { LaunchDarkly::Config.new({offline: true, send_events: false, update_processor: update_processor}) } + let(:config) { LaunchDarkly::Config.new({offline: true, send_events: false, data_source: null_data}) } let(:client) { subject.new("secret", config) } it "uses a NullEventProcessor" do @@ -367,7 +367,7 @@ def event_processor end describe 'with send_events: true' do 
- let(:config_with_events) { LaunchDarkly::Config.new({offline: false, send_events: true, update_processor: update_processor}) } + let(:config_with_events) { LaunchDarkly::Config.new({offline: false, send_events: true, data_source: null_data}) } let(:client_with_events) { subject.new("secret", config_with_events) } it "does not use a NullEventProcessor" do From fdb0291849c5faca7c4b8b5a644f342945b8fbb0 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 21 Dec 2018 11:37:38 -0800 Subject: [PATCH 063/182] default dynamodb_opts to {} --- lib/ldclient-rb/impl/integrations/dynamodb_impl.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb index 8eb1dd2a..ebaa0445 100644 --- a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +++ b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb @@ -41,7 +41,7 @@ def initialize(table_name, opts) if !opts[:existing_client].nil? @client = opts[:existing_client] else - @client = Aws::DynamoDB::Client.new(opts[:dynamodb_opts]) + @client = Aws::DynamoDB::Client.new(opts[:dynamodb_opts] || {}) end @logger.info("DynamoDBFeatureStore: using DynamoDB table \"#{table_name}\"") From 65ee009c9cef4ae3066b5faa41b67119a9c85ba5 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 21 Dec 2018 12:47:15 -0800 Subject: [PATCH 064/182] fix Unicode handling in polling requests --- lib/ldclient-rb/requestor.rb | 2 +- .../sse_shared.rb => http_util.rb} | 44 ++++++---- spec/requestor_spec.rb | 82 ++++++++++--------- spec/sse_client/sse_client_spec.rb | 24 +++++- spec/sse_client/streaming_http_spec.rb | 3 +- 5 files changed, 99 insertions(+), 56 deletions(-) rename spec/{sse_client/sse_shared.rb => http_util.rb} (56%) diff --git a/lib/ldclient-rb/requestor.rb b/lib/ldclient-rb/requestor.rb index 3e244fbe..8922e82c 100644 --- a/lib/ldclient-rb/requestor.rb +++ b/lib/ldclient-rb/requestor.rb @@ -20,7 +20,7 @@ def initialize(sdk_key, config) @sdk_key = sdk_key @config = config @client = Faraday.new do |builder| - builder.use :http_cache, store: @config.cache_store + builder.use :http_cache, store: @config.cache_store, serializer: Marshal builder.adapter :net_http_persistent end diff --git a/spec/sse_client/sse_shared.rb b/spec/http_util.rb similarity index 56% rename from spec/sse_client/sse_shared.rb rename to spec/http_util.rb index 3ecabb57..434cafc8 100644 --- a/spec/sse_client/sse_shared.rb +++ b/spec/http_util.rb @@ -4,23 +4,28 @@ require "webrick/https" class StubHTTPServer + attr_reader :requests + def initialize @port = 50000 begin - @server = create_server(@port) + base_opts = { + BindAddress: '127.0.0.1', + Port: @port, + AccessLog: [], + Logger: NullLogger.new, + RequestCallback: method(:record_request) + } + @server = create_server(@port, base_opts) rescue Errno::EADDRINUSE @port += 1 retry end + @requests = [] end - def create_server(port) - WEBrick::HTTPServer.new( - BindAddress: '127.0.0.1', - Port: port, - AccessLog: [], - Logger: NullLogger.new - ) + def create_server(port, base_opts) + WEBrick::HTTPServer.new(base_opts) end def start @@ -38,6 +43,19 @@ def base_uri def setup_response(uri_path, &action) @server.mount_proc(uri_path, action) end + + def setup_ok_response(uri_path, body, content_type=nil, headers={}) + setup_response(uri_path) do |req, res| + res.status = 200 + res.content_type = content_type if !content_type.nil? 
+ res.body = body + headers.each { |n, v| res[n] = v } + end + end + + def record_request(req, res) + @requests.push(req) + end end class StubProxyServer < StubHTTPServer @@ -49,19 +67,15 @@ def initialize @request_count = 0 end - def create_server(port) - WEBrick::HTTPProxyServer.new( - BindAddress: '127.0.0.1', - Port: port, - AccessLog: [], - Logger: NullLogger.new, + def create_server(port, base_opts) + WEBrick::HTTPProxyServer.new(base_opts.merge({ ProxyContentHandler: proc do |req,res| if !@connect_status.nil? res.status = @connect_status end @request_count += 1 end - ) + })) end end diff --git a/spec/requestor_spec.rb b/spec/requestor_spec.rb index b7838200..7f2b8ad7 100644 --- a/spec/requestor_spec.rb +++ b/spec/requestor_spec.rb @@ -1,52 +1,58 @@ +require "http_util" require "spec_helper" -require "faraday" describe LaunchDarkly::Requestor do describe ".request_all_flags" do describe "with a proxy" do - let(:requestor) { - LaunchDarkly::Requestor.new( - "key", - LaunchDarkly::Config.new({ - :proxy => "http://proxy.com", - :base_uri => "http://ld.com" - }) - ) - } it "converts the proxy option" do - faraday = Faraday.new - requestor.instance_variable_set(:@client, faraday) - allow(faraday).to receive(:get) do |*args, &block| - req = double(Faraday::Request, :headers => {}, :options => Faraday::RequestOptions.new) - block.call(req) - expect(args).to eq ['http://ld.com/sdk/latest-all'] - expect(req.options.proxy[:uri]).to eq URI("http://proxy.com") - double(body: '{"foo": "bar"}', status: 200, headers: {}) + content = '{"flags": {"flagkey": {"key": "flagkey"}}}' + with_server do |server| + server.setup_ok_response("/sdk/latest-all", content, "application/json", { "etag" => "x" }) + with_server(StubProxyServer.new) do |proxy| + config = LaunchDarkly::Config.new(base_uri: server.base_uri.to_s, proxy: proxy.base_uri.to_s) + r = LaunchDarkly::Requestor.new("sdk-key", config) + result = r.request_all_data + expect(result).to eq(JSON.parse(content, symbolize_names: true)) + end end - - requestor.request_all_data() end end describe "without a proxy" do - let(:requestor) { - LaunchDarkly::Requestor.new( - "key", - LaunchDarkly::Config.new({ - :base_uri => "http://ld.com" - }) - ) - } - it "converts the proxy option" do - faraday = Faraday.new - requestor.instance_variable_set(:@client, faraday) - allow(faraday).to receive(:get) do |*args, &block| - req = double(Faraday::Request, :headers => {}, :options => Faraday::RequestOptions.new) - block.call(req) - expect(args).to eq ['http://ld.com/sdk/latest-all'] - expect(req.options.proxy).to eq nil - double(body: '{"foo": "bar"}', status: 200, headers: {}) + it "sends headers" do + content = '{"flags": {}}' + sdk_key = 'sdk-key' + with_server do |server| + server.setup_ok_response("/sdk/latest-all", content, "application/json", { "etag" => "x" }) + r = LaunchDarkly::Requestor.new(sdk_key, LaunchDarkly::Config.new({ base_uri: server.base_uri.to_s })) + r.request_all_data + expect(server.requests.length).to eq 1 + req = server.requests[0] + expect(req.header['authorization']).to eq [sdk_key] + expect(req.header['user-agent']).to eq ["RubyClient/" + LaunchDarkly::VERSION] + end + end + + it "receives data" do + content = '{"flags": {"flagkey": {"key": "flagkey"}}}' + with_server do |server| + server.setup_ok_response("/sdk/latest-all", content, "application/json", { "etag" => "x" }) + r = LaunchDarkly::Requestor.new("sdk-key", LaunchDarkly::Config.new({ base_uri: server.base_uri.to_s })) + result = r.request_all_data + expect(result).to 
eq(JSON.parse(content, symbolize_names: true)) + end + end + + it "handles Unicode content" do + content = '{"flags": {"flagkey": {"key": "flagkey", "variations": ["blue", "grėeń"]}}}' + with_server do |server| + server.setup_ok_response("/sdk/latest-all", content, "application/json", { "etag" => "x" }) + # Note that the ETag header here is important because without it, the HTTP cache will not be used, + # and the cache is what required a fix to handle Unicode properly. See: + # https://github.com/launchdarkly/ruby-client/issues/90 + r = LaunchDarkly::Requestor.new("sdk-key", LaunchDarkly::Config.new({ base_uri: server.base_uri.to_s })) + result = r.request_all_data + expect(result).to eq(JSON.parse(content, symbolize_names: true)) end - requestor.request_all_data() end end end diff --git a/spec/sse_client/sse_client_spec.rb b/spec/sse_client/sse_client_spec.rb index 54f1f5c7..3adca889 100644 --- a/spec/sse_client/sse_client_spec.rb +++ b/spec/sse_client/sse_client_spec.rb @@ -1,6 +1,6 @@ require "spec_helper" require "socketry" -require "sse_client/sse_shared" +require "http_util" # # End-to-end tests of SSEClient against a real server @@ -70,6 +70,28 @@ def with_client(client) end end + it "handles Unicode correctly (assuming UTF-8)" do + please = "proszę" + thank_you = "dziękuję" + events_body = <<-EOT +event: #{please} +data: #{thank_you} + +EOT + with_server do |server| + server.setup_ok_response("/", events_body, "text/event-stream") + + event_sink = Queue.new + client = subject.new(server.base_uri) do |c| + c.on_event { |event| event_sink << event } + end + + with_client(client) do |client| + expect(event_sink.pop).to eq(SSE::SSEEvent.new(please.to_sym, thank_you, nil)) + end + end + end + it "reconnects after error response" do events_body = <<-EOT event: go diff --git a/spec/sse_client/streaming_http_spec.rb b/spec/sse_client/streaming_http_spec.rb index 7dfac9bd..fbe60b96 100644 --- a/spec/sse_client/streaming_http_spec.rb +++ b/spec/sse_client/streaming_http_spec.rb @@ -1,6 +1,6 @@ require "spec_helper" require "socketry" -require "sse_client/sse_shared" +require "http_util" # # End-to-end tests of HTTP requests against a real server @@ -119,6 +119,7 @@ def with_connection(cxn) end it "throws error if proxy responds with error status" do + body = "hi" with_server do |server| server.setup_response("/") do |req,res| res.body = body From 86820ea710d8698b21b78ac093487c918e26bcbe Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 3 Jan 2019 15:31:44 -0800 Subject: [PATCH 065/182] initial Consul implementation --- .circleci/config.yml | 16 +++ ldclient-rb.gemspec | 1 + .../impl/integrations/consul_impl.rb | 132 ++++++++++++++++++ lib/ldclient-rb/integrations.rb | 12 ++ lib/ldclient-rb/integrations/consul.rb | 37 +++++ .../integrations/consul_feature_store_spec.rb | 37 +++++ 6 files changed, 235 insertions(+) create mode 100644 lib/ldclient-rb/impl/integrations/consul_impl.rb create mode 100644 lib/ldclient-rb/integrations/consul.rb create mode 100644 spec/integrations/consul_feature_store_spec.rb diff --git a/.circleci/config.yml b/.circleci/config.yml index f19ae7bc..45540d63 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -33,30 +33,35 @@ jobs: <<: *ruby-docker-template docker: - image: circleci/ruby:2.2.9-jessie + - image: consul - image: redis - image: amazon/dynamodb-local test-2.3: <<: *ruby-docker-template docker: - image: circleci/ruby:2.3.6-jessie + - image: consul - image: redis - image: amazon/dynamodb-local test-2.4: <<: *ruby-docker-template docker: - 
image: circleci/ruby:2.4.4-stretch + - image: consul - image: redis - image: amazon/dynamodb-local test-2.5: <<: *ruby-docker-template docker: - image: circleci/ruby:2.5.1-stretch + - image: consul - image: redis - image: amazon/dynamodb-local test-jruby-9.2: <<: *ruby-docker-template docker: - image: circleci/jruby:9-jdk + - image: consul - image: redis - image: amazon/dynamodb-local @@ -93,8 +98,19 @@ jobs: mv Gemfile.lock "Gemfile.lock.$i" done - run: + name: start DynamoDB command: docker run -p 8000:8000 amazon/dynamodb-local background: true + - run: + name: download Consul + command: wget https://releases.hashicorp.com/consul/0.8.0/consul_0.8.0_linux_amd64.zip + - run: + name: extract Consul + command: unzip consul_0.8.0_linux_amd64.zip + - run: + name: start Consul + command: ./consul agent -dev + background: true - run: name: run tests for all versions shell: /bin/bash -leo pipefail diff --git a/ldclient-rb.gemspec b/ldclient-rb.gemspec index 8b1f4cc7..35fbf45c 100644 --- a/ldclient-rb.gemspec +++ b/ldclient-rb.gemspec @@ -25,6 +25,7 @@ Gem::Specification.new do |spec| spec.add_development_dependency "bundler", "~> 1.7" spec.add_development_dependency "rspec", "~> 3.2" spec.add_development_dependency "codeclimate-test-reporter", "~> 0" + spec.add_development_dependency "diplomat", ">= 2.0.2" spec.add_development_dependency "redis", "~> 3.3.5" spec.add_development_dependency "connection_pool", ">= 2.1.2" spec.add_development_dependency "rake", "~> 10.0" diff --git a/lib/ldclient-rb/impl/integrations/consul_impl.rb b/lib/ldclient-rb/impl/integrations/consul_impl.rb new file mode 100644 index 00000000..48d308c2 --- /dev/null +++ b/lib/ldclient-rb/impl/integrations/consul_impl.rb @@ -0,0 +1,132 @@ +require "json" + +module LaunchDarkly + module Impl + module Integrations + module Consul + # + # Internal implementation of the Consul feature store, intended to be used with CachingStoreWrapper. + # + class ConsulFeatureStoreCore + begin + require "diplomat" + CONSUL_ENABLED = true + rescue ScriptError, StandardError + CONSUL_ENABLED = false + end + + def initialize(opts) + if !CONSUL_ENABLED + raise RuntimeError.new("can't use Consul feature store without the 'diplomat' gem") + end + + @prefix = (opts[:prefix] || LaunchDarkly::Integrations::Consul.default_prefix) + '/' + @logger = opts[:logger] || Config.default_logger + @client = Diplomat::Kv.new(configuration: opts[:consul_config]) + + @logger.info("ConsulFeatureStore: using Consul host at #{Diplomat.configuration.url}") + end + + def init_internal(all_data) + # Start by reading the existing keys; we will later delete any of these that weren't in all_data. + unused_old_keys = set() + unused_old_keys.merge(@client.get(@prefix, keys: true, recurse: true)) + + ops = [] + num_items = 0 + + # Insert or update every provided item + all_data.each do |kind, items| + items.values.each do |item| + value = item.to_json + key = item_key(kind, item[:key]) + ops.push({ 'KV' => { 'Verb' => 'set', 'Key' => key, 'Value' => value } }) + unused_old_keys.delete(key) + num_items = num_items + 1 + end + end + + # Now delete any previously existing items whose keys were not in the current data + unused_old_keys.each do |tuple| + ops.push({ 'KV' => { 'Verb' => 'delete', 'Key' => key } }) + end + + # Now set the special key that we check in initialized_internal? 
+ ops.push({ 'KV' => { 'Verb' => 'set', 'Key' => key, 'Value' => '' } }) + + ConsulUtil.batch_operations(ops) + + @logger.info { "Initialized database with #{num_items} items" } + end + + def get_internal(kind, key) + + resp = get_item_by_keys(namespace_for_kind(kind), key) + unmarshal_item(resp.item) + end + + def get_all_internal(kind) + items_out = {} + + items_out + end + + def upsert_internal(kind, new_item) + + end + + def initialized_internal? + + end + + def stop + # There's no way to close the Consul client + end + + private + + def item_key(kind, key) + kind_key(kind) + '/' + key + end + + def kind_key(kind) + @prefix + kind[:namespace] + end + + def inited_key + @prefix + '$inited' + end + + def marshal_item(kind, item) + make_keys_hash(namespace_for_kind(kind), item[:key]).merge({ + VERSION_ATTRIBUTE => item[:version], + ITEM_JSON_ATTRIBUTE => item.to_json + }) + end + + def unmarshal_item(item) + return nil if item.nil? || item.length == 0 + json_attr = item[ITEM_JSON_ATTRIBUTE] + raise RuntimeError.new("DynamoDB map did not contain expected item string") if json_attr.nil? + JSON.parse(json_attr, symbolize_names: true) + end + end + + class ConsulUtil + # + # Submits as many transactions as necessary to submit all of the given operations. + # The ops array is consumed. + # + def self.batch_write_requests(ops) + batch_size = 64 # Consul can only do this many at a time + while true + chunk = requests.shift(batch_size) + break if chunk.empty? + Diplomat::Kv.txn(chunk) + end + end + end + end + end + end +end diff --git a/lib/ldclient-rb/integrations.rb b/lib/ldclient-rb/integrations.rb index bfaed2eb..8c9f6249 100644 --- a/lib/ldclient-rb/integrations.rb +++ b/lib/ldclient-rb/integrations.rb @@ -1,3 +1,4 @@ +require "ldclient-rb/integrations/consul" require "ldclient-rb/integrations/dynamodb" require "ldclient-rb/integrations/redis" require "ldclient-rb/integrations/util/store_wrapper" @@ -7,6 +8,17 @@ module LaunchDarkly # Tools for connecting the LaunchDarkly client to other software. # module Integrations + # + # Integration with [Consul](https://www.consul.io/). + # + # Note that in order to use this integration, you must first install the gem `diplomat`. + # + # @since 5.5.0 + # + module Consul + # code is in ldclient-rb/impl/integrations/consul_impl + end + # # Integration with [DynamoDB](https://aws.amazon.com/dynamodb/). # diff --git a/lib/ldclient-rb/integrations/consul.rb b/lib/ldclient-rb/integrations/consul.rb new file mode 100644 index 00000000..7450d3b9 --- /dev/null +++ b/lib/ldclient-rb/integrations/consul.rb @@ -0,0 +1,37 @@ +require "ldclient-rb/impl/integrations/consul_impl" +require "ldclient-rb/integrations/util/store_wrapper" + +module LaunchDarkly + module Integrations + module Consul + # + # Default value for the `prefix` option for {new_feature_store}. + # + # @return [String] the default key prefix + # + def self.default_prefix + 'launchdarkly' + end + + # + # Creates a Consul-backed persistent feature store. + # + # To use this method, you must first install the gem `diplomat`. Then, put the object returned by + # this method into the `feature_store` property of your client configuration ({LaunchDarkly::Config}). 
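+      # A minimal sketch of that wiring (the prefix value is just an example):
+      #
+      #     store = LaunchDarkly::Integrations::Consul.new_feature_store(prefix: "my-app")
+      #     config = LaunchDarkly::Config.new(feature_store: store)
+      #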
+ # + # @param opts [Hash] the configuration options + # @option opts [Hash] :consul_config an instance of `Diplomat::Configuration` to replace the default + # Consul client configuration + # @option opts [String] :prefix namespace prefix to add to all keys used by LaunchDarkly + # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` + # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no local caching + # @option opts [Integer] :capacity (1000) maximum number of items in the cache + # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object + # + def self.new_feature_store(opts, &block) + core = LaunchDarkly::Impl::Integrations::Consul::ConsulFeatureStoreCore.new(opts) + return LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts) + end + end + end +end diff --git a/spec/integrations/consul_feature_store_spec.rb b/spec/integrations/consul_feature_store_spec.rb new file mode 100644 index 00000000..1aa6f919 --- /dev/null +++ b/spec/integrations/consul_feature_store_spec.rb @@ -0,0 +1,37 @@ +require "feature_store_spec_base" +#require "diplomat" +require "spec_helper" + + +$my_prefix = 'testprefix' +$null_log = ::Logger.new($stdout) +$null_log.level = ::Logger::FATAL + +$base_opts = { + prefix: $my_prefix, + logger: $null_log +} + +def create_consul_store(opts = {}) + LaunchDarkly::Integrations::Consul::new_feature_store( + opts.merge($base_opts).merge({ expiration: 60 })) +end + +def create_consul_store_uncached(opts = {}) + LaunchDarkly::Integrations::Consul::new_feature_store( + opts.merge($base_opts).merge({ expiration: 0 })) +end + + +describe "Consul feature store" do + + # These tests will all fail if there isn't a local Consul instance running. 
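+  # (A dev-mode agent is enough for local runs; the CI config earlier in this patch series
+  # starts one with `consul agent -dev`.)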
+ + context "with local cache" do + include_examples "feature_store", method(:create_consul_store) + end + + context "without local cache" do + include_examples "feature_store", method(:create_consul_store_uncached) + end +end From 485a73dfdc23c25ff56db7fcdaf5ccf417df1579 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 3 Jan 2019 15:45:12 -0800 Subject: [PATCH 066/182] use new SSE gem --- ldclient-rb.gemspec | 5 +- lib/ldclient-rb/stream.rb | 24 ++- lib/sse_client.rb | 4 - lib/sse_client/backoff.rb | 38 ---- lib/sse_client/sse_client.rb | 178 ----------------- lib/sse_client/sse_events.rb | 67 ------- lib/sse_client/streaming_http.rb | 199 ------------------- spec/sse_client/sse_client_spec.rb | 177 ----------------- spec/sse_client/sse_events_spec.rb | 100 ---------- spec/sse_client/sse_shared.rb | 82 -------- spec/sse_client/streaming_http_spec.rb | 263 ------------------------- spec/stream_spec.rb | 36 ++-- 12 files changed, 34 insertions(+), 1139 deletions(-) delete mode 100644 lib/sse_client.rb delete mode 100644 lib/sse_client/backoff.rb delete mode 100644 lib/sse_client/sse_client.rb delete mode 100644 lib/sse_client/sse_events.rb delete mode 100644 lib/sse_client/streaming_http.rb delete mode 100644 spec/sse_client/sse_client_spec.rb delete mode 100644 spec/sse_client/sse_events_spec.rb delete mode 100644 spec/sse_client/sse_shared.rb delete mode 100644 spec/sse_client/streaming_http_spec.rb diff --git a/ldclient-rb.gemspec b/ldclient-rb.gemspec index 8b1f4cc7..9f7c5089 100644 --- a/ldclient-rb.gemspec +++ b/ldclient-rb.gemspec @@ -13,7 +13,7 @@ Gem::Specification.new do |spec| spec.summary = "LaunchDarkly SDK for Ruby" spec.description = "Official LaunchDarkly SDK for Ruby" spec.homepage = "https://github.com/launchdarkly/ruby-client" - spec.license = "Apache 2.0" + spec.license = "Apache-2.0" spec.files = `git ls-files -z`.split("\x0") spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) } @@ -40,6 +40,5 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "net-http-persistent", "~> 2.9" spec.add_runtime_dependency "concurrent-ruby", "~> 1.0.4" spec.add_runtime_dependency "hashdiff", "~> 0.2" - spec.add_runtime_dependency "http_tools", '~> 0.4.5' - spec.add_runtime_dependency "socketry", "~> 0.5.1" + spec.add_runtime_dependency "ld-eventsource", '~> 1.0' end diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index 660d7063..adc4bf59 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -1,6 +1,6 @@ require "concurrent/atomics" require "json" -require "sse_client" +require "ld-eventsource" module LaunchDarkly # @private @@ -54,15 +54,18 @@ def start read_timeout: READ_TIMEOUT_SECONDS, logger: @config.logger } - @es = SSE::SSEClient.new(@config.stream_uri + "/all", opts) do |conn| - conn.on_event { |event| process_message(event, event.type) } + @es = LaunchDarklySSE::SSEClient.new(@config.stream_uri + "/all", **opts) do |conn| + conn.on_event { |event| process_message(event) } conn.on_error { |err| - status = err[:status_code] - message = Util.http_error_message(status, "streaming connection", "will retry") - @config.logger.error { "[LDClient] #{message}" } - if !Util.http_error_recoverable?(status) - @ready.set # if client was waiting on us, make it stop waiting - has no effect if already set - stop + case err + when LaunchDarklySSE::HTTPError + status = err.status + message = Util.http_error_message(status, "streaming connection", "will retry") + @config.logger.error { "[LDClient] #{message}" } + if 
!Util.http_error_recoverable?(status) + @ready.set # if client was waiting on us, make it stop waiting - has no effect if already set + stop + end end } end @@ -79,7 +82,8 @@ def stop private - def process_message(message, method) + def process_message(message) + method = message.type @config.logger.debug { "[LDClient] Stream received #{method} message: #{message.data}" } if method == PUT message = JSON.parse(message.data, symbolize_names: true) diff --git a/lib/sse_client.rb b/lib/sse_client.rb deleted file mode 100644 index dd24c3a6..00000000 --- a/lib/sse_client.rb +++ /dev/null @@ -1,4 +0,0 @@ -require "sse_client/streaming_http" -require "sse_client/sse_events" -require "sse_client/backoff" -require "sse_client/sse_client" diff --git a/lib/sse_client/backoff.rb b/lib/sse_client/backoff.rb deleted file mode 100644 index 73e0754f..00000000 --- a/lib/sse_client/backoff.rb +++ /dev/null @@ -1,38 +0,0 @@ - -module SSE - # - # A simple backoff algorithm that can be reset at any time, or reset itself after a given - # interval has passed without errors. - # - class Backoff - def initialize(base_interval, max_interval, auto_reset_interval = 60) - @base_interval = base_interval - @max_interval = max_interval - @auto_reset_interval = auto_reset_interval - @attempts = 0 - @last_good_time = nil - @jitter_rand = Random.new - end - - attr_accessor :base_interval - - def next_interval - if !@last_good_time.nil? && (Time.now.to_i - @last_good_time) >= @auto_reset_interval - @attempts = 0 - end - @last_good_time = nil - if @attempts == 0 - @attempts += 1 - return 0 - end - @last_good_time = nil - target = ([@base_interval * (2 ** @attempts), @max_interval].min).to_f - @attempts += 1 - (target / 2) + @jitter_rand.rand(target / 2) - end - - def mark_success - @last_good_time = Time.now.to_i if @last_good_time.nil? - end - end -end diff --git a/lib/sse_client/sse_client.rb b/lib/sse_client/sse_client.rb deleted file mode 100644 index 5b7e0fd9..00000000 --- a/lib/sse_client/sse_client.rb +++ /dev/null @@ -1,178 +0,0 @@ -require "concurrent/atomics" -require "logger" -require "thread" -require "uri" - -# -# A lightweight Server-Sent Events implementation based on the `socketry` gem. -# -# This module will be moved to a separate gem in the future. -# -# @private -# -module SSE - # - # A lightweight Server-Sent Events implementation, relying on two gems: socketry for sockets with - # read timeouts, and http_tools for HTTP response parsing. The overall logic is based on - # [https://github.com/Tonkpils/celluloid-eventsource]. - # - class SSEClient - DEFAULT_CONNECT_TIMEOUT = 10 - DEFAULT_READ_TIMEOUT = 300 - DEFAULT_RECONNECT_TIME = 1 - MAX_RECONNECT_TIME = 30 - - def initialize(uri, options = {}) - @uri = URI(uri) - @stopped = Concurrent::AtomicBoolean.new(false) - - @headers = options[:headers] ? options[:headers].clone : {} - @connect_timeout = options[:connect_timeout] || DEFAULT_CONNECT_TIMEOUT - @read_timeout = options[:read_timeout] || DEFAULT_READ_TIMEOUT - @logger = options[:logger] || default_logger - - if options[:proxy] - @proxy = options[:proxy] - else - proxyUri = @uri.find_proxy - if !proxyUri.nil? && (proxyUri.scheme == 'http' || proxyUri.scheme == 'https') - @proxy = proxyUri - end - end - - reconnect_time = options[:reconnect_time] || DEFAULT_RECONNECT_TIME - @backoff = Backoff.new(reconnect_time, MAX_RECONNECT_TIME) - - @on = { event: ->(_) {}, error: ->(_) {} } - @last_id = nil - - yield self if block_given? 
- - Thread.new do - run_stream - end - end - - def on(event_name, &action) - @on[event_name.to_sym] = action - end - - def on_event(&action) - @on[:event] = action - end - - def on_error(&action) - @on[:error] = action - end - - def close - if @stopped.make_true - @cxn.close if !@cxn.nil? - @cxn = nil - end - end - - private - - def default_logger - log = ::Logger.new($stdout) - log.level = ::Logger::WARN - log - end - - def run_stream - while !@stopped.value - @cxn = nil - begin - @cxn = connect - # There's a potential race if close was called in the middle of the previous line, i.e. after we - # connected but before @cxn was set. Checking the variable again is a bit clunky but avoids that. - return if @stopped.value - read_stream(@cxn) if !@cxn.nil? - rescue Errno::EBADF - # don't log this - it probably means we closed our own connection deliberately - rescue StandardError => e - @logger.error { "Unexpected error from event source: #{e.inspect}" } - @logger.debug { "Exception trace: #{e.backtrace}" } - end - begin - @cxn.close if !@cxn.nil? - rescue StandardError => e - @logger.error { "Unexpected error while closing stream: #{e.inspect}" } - @logger.debug { "Exception trace: #{e.backtrace}" } - end - end - end - - # Try to establish a streaming connection. Returns the StreamingHTTPConnection object if successful. - def connect - loop do - return if @stopped.value - interval = @backoff.next_interval - if interval > 0 - @logger.warn { "Will retry connection after #{'%.3f' % interval} seconds" } - sleep(interval) - end - begin - cxn = open_connection(build_headers) - if cxn.status != 200 - body = cxn.read_all # grab the whole response body in case it has error details - cxn.close - @on[:error].call({status_code: cxn.status, body: body}) - next - elsif cxn.headers["content-type"] && cxn.headers["content-type"].start_with?("text/event-stream") - return cxn # we're good to proceed - end - @logger.error { "Event source returned unexpected content type '#{cxn.headers["content-type"]}'" } - rescue Errno::EBADF - raise - rescue StandardError => e - @logger.error { "Unexpected error from event source: #{e.inspect}" } - @logger.debug { "Exception trace: #{e.backtrace}" } - cxn.close if !cxn.nil? - end - # if unsuccessful, continue the loop to connect again - end - end - - # Just calls the StreamingHTTPConnection constructor - factored out for test purposes - def open_connection(headers) - StreamingHTTPConnection.new(@uri, @proxy, headers, @connect_timeout, @read_timeout) - end - - # Pipe the output of the StreamingHTTPConnection into the EventParser, and dispatch events as - # they arrive. - def read_stream(cxn) - event_parser = EventParser.new(cxn.read_lines) - event_parser.items.each do |item| - return if @stopped.value - case item - when SSEEvent - dispatch_event(item) - when SSESetRetryInterval - @backoff.base_interval = event.milliseconds.t-Of / 1000 - end - end - end - - def dispatch_event(event) - @last_id = event.id - - # Tell the Backoff object that as of the current time, we have succeeded in getting some data. It - # uses that information so it can automatically reset itself if enough time passes between failures. - @backoff.mark_success - - # Pass the event to the caller - @on[:event].call(event) - end - - def build_headers - h = { - 'Accept' => 'text/event-stream', - 'Cache-Control' => 'no-cache' - } - h['Last-Event-Id'] = @last_id if !@last_id.nil? 
- h.merge(@headers) - end - end -end diff --git a/lib/sse_client/sse_events.rb b/lib/sse_client/sse_events.rb deleted file mode 100644 index 762cc2b0..00000000 --- a/lib/sse_client/sse_events.rb +++ /dev/null @@ -1,67 +0,0 @@ - -module SSE - # Server-Sent Event type used by SSEClient and EventParser. - SSEEvent = Struct.new(:type, :data, :id) - - SSESetRetryInterval = Struct.new(:milliseconds) - - # - # Accepts lines of text via an iterator, and parses them into SSE messages. - # - class EventParser - def initialize(lines) - @lines = lines - reset_buffers - end - - # Generator that parses the input interator and returns instances of SSEEvent or SSERetryInterval. - def items - Enumerator.new do |gen| - @lines.each do |line| - line.chomp! - if line.empty? - event = maybe_create_event - reset_buffers - gen.yield event if !event.nil? - else - case line - when /^(\w+): ?(.*)$/ - item = process_field($1, $2) - gen.yield item if !item.nil? - end - end - end - end - end - - private - - def reset_buffers - @id = nil - @type = nil - @data = "" - end - - def process_field(name, value) - case name - when "event" - @type = value.to_sym - when "data" - @data << "\n" if !@data.empty? - @data << value - when "id" - @id = value - when "retry" - if /^(?\d+)$/ =~ value - return SSESetRetryInterval.new(num.to_i) - end - end - nil - end - - def maybe_create_event - return nil if @data.empty? - SSEEvent.new(@type || :message, @data, @id) - end - end -end diff --git a/lib/sse_client/streaming_http.rb b/lib/sse_client/streaming_http.rb deleted file mode 100644 index eeb80e82..00000000 --- a/lib/sse_client/streaming_http.rb +++ /dev/null @@ -1,199 +0,0 @@ -require "concurrent/atomics" -require "http_tools" -require "socketry" - -module SSE - # - # Wrapper around a socket providing a simplified HTTP request-response cycle including streaming. - # The socket is created and managed by Socketry, which we use so that we can have a read timeout. - # - class StreamingHTTPConnection - attr_reader :status, :headers - - def initialize(uri, proxy, headers, connect_timeout, read_timeout) - @socket = HTTPConnectionFactory.connect(uri, proxy, connect_timeout, read_timeout) - @socket.write(build_request(uri, headers)) - @reader = HTTPResponseReader.new(@socket, read_timeout) - @status = @reader.status - @headers = @reader.headers - @closed = Concurrent::AtomicBoolean.new(false) - end - - def close - if @closed.make_true - @socket.close if @socket - @socket = nil - end - end - - # Generator that returns one line of the response body at a time (delimited by \r, \n, - # or \r\n) until the response is fully consumed or the socket is closed. - def read_lines - @reader.read_lines - end - - # Consumes the entire response body and returns it. - def read_all - @reader.read_all - end - - private - - # Build an HTTP request line and headers. - def build_request(uri, headers) - ret = "GET #{uri.request_uri} HTTP/1.1\r\n" - ret << "Host: #{uri.host}\r\n" - headers.each { |k, v| - ret << "#{k}: #{v}\r\n" - } - ret + "\r\n" - end - end - - # - # Used internally to send the HTTP request, including the proxy dialogue if necessary. 
- # - class HTTPConnectionFactory - def self.connect(uri, proxy, connect_timeout, read_timeout) - if !proxy - return open_socket(uri, connect_timeout) - end - - socket = open_socket(proxy, connect_timeout) - socket.write(build_proxy_request(uri, proxy)) - - # temporarily create a reader just for the proxy connect response - proxy_reader = HTTPResponseReader.new(socket, read_timeout) - if proxy_reader.status != 200 - raise ProxyError, "proxy connection refused, status #{proxy_reader.status}" - end - - # start using TLS at this point if appropriate - if uri.scheme.downcase == 'https' - wrap_socket_in_ssl_socket(socket) - else - socket - end - end - - private - - def self.open_socket(uri, connect_timeout) - if uri.scheme.downcase == 'https' - Socketry::SSL::Socket.connect(uri.host, uri.port, timeout: connect_timeout) - else - Socketry::TCP::Socket.connect(uri.host, uri.port, timeout: connect_timeout) - end - end - - # Build a proxy connection header. - def self.build_proxy_request(uri, proxy) - ret = "CONNECT #{uri.host}:#{uri.port} HTTP/1.1\r\n" - ret << "Host: #{uri.host}:#{uri.port}\r\n" - if proxy.user || proxy.password - encoded_credentials = Base64.strict_encode64([proxy.user || '', proxy.password || ''].join(":")) - ret << "Proxy-Authorization: Basic #{encoded_credentials}\r\n" - end - ret << "\r\n" - ret - end - - def self.wrap_socket_in_ssl_socket(socket) - io = IO.try_convert(socket) - ssl_sock = OpenSSL::SSL::SSLSocket.new(io, OpenSSL::SSL::SSLContext.new) - ssl_sock.connect - Socketry::SSL::Socket.new.from_socket(ssl_sock) - end - end - - class ProxyError < StandardError - def initialize(message) - super - end - end - - # - # Used internally to read the HTTP response, either all at once or as a stream of text lines. - # Incoming data is fed into an instance of HTTPTools::Parser, which gives us the header and - # chunks of the body via callbacks. - # - class HTTPResponseReader - DEFAULT_CHUNK_SIZE = 10000 - - attr_reader :status, :headers - - def initialize(socket, read_timeout) - @socket = socket - @read_timeout = read_timeout - @parser = HTTPTools::Parser.new - @buffer = "" - @done = false - @lock = Mutex.new - - # Provide callbacks for the Parser to give us the headers and body. This has to be done - # before we start piping any data into the parser. - have_headers = false - @parser.on(:header) do - have_headers = true - end - @parser.on(:stream) do |data| - @lock.synchronize { @buffer << data } # synchronize because we're called from another thread in Socketry - end - @parser.on(:finish) do - @lock.synchronize { @done = true } - end - - # Block until the status code and headers have been successfully read. - while !have_headers - raise EOFError if !read_chunk_into_buffer - end - @headers = Hash[@parser.header.map { |k,v| [k.downcase, v] }] - @status = @parser.status_code - end - - def read_lines - Enumerator.new do |gen| - loop do - line = read_line - break if line.nil? - gen.yield line - end - end - end - - def read_all - while read_chunk_into_buffer - end - @buffer - end - - private - - # Attempt to read some more data from the socket. Return true if successful, false if EOF. - # A read timeout will result in an exception from Socketry's readpartial method. 
- def read_chunk_into_buffer - # If @done is set, it means the Parser has signaled end of response body - @lock.synchronize { return false if @done } - data = @socket.readpartial(DEFAULT_CHUNK_SIZE, timeout: @read_timeout) - return false if data == :eof - @parser << data - # We are piping the content through the parser so that it can handle things like chunked - # encoding for us. The content ends up being appended to @buffer via our callback. - true - end - - # Extract the next line of text from the read buffer, refilling the buffer as needed. - def read_line - loop do - @lock.synchronize do - i = @buffer.index(/[\r\n]/) - if !i.nil? - i += 1 if (@buffer[i] == "\r" && i < @buffer.length - 1 && @buffer[i + 1] == "\n") - return @buffer.slice!(0, i + 1).force_encoding(Encoding::UTF_8) - end - end - return nil if !read_chunk_into_buffer - end - end - end -end diff --git a/spec/sse_client/sse_client_spec.rb b/spec/sse_client/sse_client_spec.rb deleted file mode 100644 index 54f1f5c7..00000000 --- a/spec/sse_client/sse_client_spec.rb +++ /dev/null @@ -1,177 +0,0 @@ -require "spec_helper" -require "socketry" -require "sse_client/sse_shared" - -# -# End-to-end tests of SSEClient against a real server -# -describe SSE::SSEClient do - subject { SSE::SSEClient } - - def with_client(client) - begin - yield client - ensure - client.close - end - end - - it "sends expected headers" do - with_server do |server| - requests = Queue.new - server.setup_response("/") do |req,res| - requests << req - res.content_type = "text/event-stream" - res.status = 200 - end - - headers = { - "Authorization" => "secret" - } - - with_client(subject.new(server.base_uri, headers: headers)) do |client| - received_req = requests.pop - expect(received_req.header).to eq({ - "accept" => ["text/event-stream"], - "cache-control" => ["no-cache"], - "host" => ["127.0.0.1"], - "authorization" => ["secret"] - }) - end - end - end - - it "receives messages" do - events_body = <<-EOT -event: go -data: foo -id: 1 - -event: stop -data: bar - -EOT - with_server do |server| - server.setup_response("/") do |req,res| - res.content_type = "text/event-stream" - res.status = 200 - res.body = events_body - end - - event_sink = Queue.new - client = subject.new(server.base_uri) do |c| - c.on_event { |event| event_sink << event } - end - - with_client(client) do |client| - expect(event_sink.pop).to eq(SSE::SSEEvent.new(:go, "foo", "1")) - expect(event_sink.pop).to eq(SSE::SSEEvent.new(:stop, "bar", nil)) - end - end - end - - it "reconnects after error response" do - events_body = <<-EOT -event: go -data: foo - -EOT - with_server do |server| - attempt = 0 - server.setup_response("/") do |req,res| - attempt += 1 - if attempt == 1 - res.status = 500 - res.body = "sorry" - res.keep_alive = false - else - res.content_type = "text/event-stream" - res.status = 200 - res.body = events_body - end - end - - event_sink = Queue.new - error_sink = Queue.new - client = subject.new(server.base_uri, reconnect_time: 0.25) do |c| - c.on_event { |event| event_sink << event } - c.on_error { |error| error_sink << error } - end - - with_client(client) do |client| - expect(event_sink.pop).to eq(SSE::SSEEvent.new(:go, "foo", nil)) - expect(error_sink.pop).to eq({ status_code: 500, body: "sorry" }) - expect(attempt).to be >= 2 - end - end - end - - it "reconnects after read timeout" do - events_body = <<-EOT -event: go -data: foo - -EOT - with_server do |server| - attempt = 0 - server.setup_response("/") do |req,res| - attempt += 1 - if attempt == 1 - sleep(2) - end - 
res.content_type = "text/event-stream" - res.status = 200 - res.body = events_body - end - - event_sink = Queue.new - client = subject.new(server.base_uri, - reconnect_time: 0.25, read_timeout: 0.25) do |c| - c.on_event { |event| event_sink << event } - end - - with_client(client) do |client| - expect(event_sink.pop).to eq(SSE::SSEEvent.new(:go, "foo", nil)) - expect(attempt).to be >= 2 - end - end - end - - it "reconnects if stream returns EOF" do - events_body_1 = <<-EOT -event: go -data: foo - -EOT - events_body_2 = <<-EOT -event: go -data: bar - -EOT - with_server do |server| - attempt = 0 - server.setup_response("/") do |req,res| - attempt += 1 - if attempt == 1 - res.body = events_body_1 - else - res.body = events_body_2 - end - res.content_type = "text/event-stream" - res.status = 200 - end - - event_sink = Queue.new - client = subject.new(server.base_uri, - reconnect_time: 0.25, read_timeout: 0.25) do |c| - c.on_event { |event| event_sink << event } - end - - with_client(client) do |client| - expect(event_sink.pop).to eq(SSE::SSEEvent.new(:go, "foo", nil)) - expect(event_sink.pop).to eq(SSE::SSEEvent.new(:go, "bar", nil)) - expect(attempt).to be >= 2 - end - end - end -end diff --git a/spec/sse_client/sse_events_spec.rb b/spec/sse_client/sse_events_spec.rb deleted file mode 100644 index 438cfa7a..00000000 --- a/spec/sse_client/sse_events_spec.rb +++ /dev/null @@ -1,100 +0,0 @@ -require "spec_helper" - -describe SSE::EventParser do - subject { SSE::EventParser } - - it "parses an event with all fields" do - lines = [ - "event: abc\r\n", - "data: def\r\n", - "id: 1\r\n", - "\r\n" - ] - ep = subject.new(lines) - - expected_event = SSE::SSEEvent.new(:abc, "def", "1") - output = ep.items.to_a - expect(output).to eq([ expected_event ]) - end - - it "parses an event with only data" do - lines = [ - "data: def\r\n", - "\r\n" - ] - ep = subject.new(lines) - - expected_event = SSE::SSEEvent.new(:message, "def", nil) - output = ep.items.to_a - expect(output).to eq([ expected_event ]) - end - - it "parses an event with multi-line data" do - lines = [ - "data: def\r\n", - "data: ghi\r\n", - "\r\n" - ] - ep = subject.new(lines) - - expected_event = SSE::SSEEvent.new(:message, "def\nghi", nil) - output = ep.items.to_a - expect(output).to eq([ expected_event ]) - end - - it "ignores comments" do - lines = [ - ":", - "data: def\r\n", - ":", - "\r\n" - ] - ep = subject.new(lines) - - expected_event = SSE::SSEEvent.new(:message, "def", nil) - output = ep.items.to_a - expect(output).to eq([ expected_event ]) - end - - it "parses reconnect interval" do - lines = [ - "retry: 2500\r\n", - "\r\n" - ] - ep = subject.new(lines) - - expected_item = SSE::SSESetRetryInterval.new(2500) - output = ep.items.to_a - expect(output).to eq([ expected_item ]) - end - - it "parses multiple events" do - lines = [ - "event: abc\r\n", - "data: def\r\n", - "id: 1\r\n", - "\r\n", - "data: ghi\r\n", - "\r\n" - ] - ep = subject.new(lines) - - expected_event_1 = SSE::SSEEvent.new(:abc, "def", "1") - expected_event_2 = SSE::SSEEvent.new(:message, "ghi", nil) - output = ep.items.to_a - expect(output).to eq([ expected_event_1, expected_event_2 ]) - end - - it "ignores events with no data" do - lines = [ - "event: nothing\r\n", - "\r\n", - "event: nada\r\n", - "\r\n" - ] - ep = subject.new(lines) - - output = ep.items.to_a - expect(output).to eq([]) - end -end diff --git a/spec/sse_client/sse_shared.rb b/spec/sse_client/sse_shared.rb deleted file mode 100644 index 3ecabb57..00000000 --- a/spec/sse_client/sse_shared.rb +++ /dev/null 
@@ -1,82 +0,0 @@ -require "spec_helper" -require "webrick" -require "webrick/httpproxy" -require "webrick/https" - -class StubHTTPServer - def initialize - @port = 50000 - begin - @server = create_server(@port) - rescue Errno::EADDRINUSE - @port += 1 - retry - end - end - - def create_server(port) - WEBrick::HTTPServer.new( - BindAddress: '127.0.0.1', - Port: port, - AccessLog: [], - Logger: NullLogger.new - ) - end - - def start - Thread.new { @server.start } - end - - def stop - @server.shutdown - end - - def base_uri - URI("http://127.0.0.1:#{@port}") - end - - def setup_response(uri_path, &action) - @server.mount_proc(uri_path, action) - end -end - -class StubProxyServer < StubHTTPServer - attr_reader :request_count - attr_accessor :connect_status - - def initialize - super - @request_count = 0 - end - - def create_server(port) - WEBrick::HTTPProxyServer.new( - BindAddress: '127.0.0.1', - Port: port, - AccessLog: [], - Logger: NullLogger.new, - ProxyContentHandler: proc do |req,res| - if !@connect_status.nil? - res.status = @connect_status - end - @request_count += 1 - end - ) - end -end - -class NullLogger - def method_missing(*) - self - end -end - -def with_server(server = nil) - server = StubHTTPServer.new if server.nil? - begin - server.start - yield server - ensure - server.stop - end -end diff --git a/spec/sse_client/streaming_http_spec.rb b/spec/sse_client/streaming_http_spec.rb deleted file mode 100644 index 7dfac9bd..00000000 --- a/spec/sse_client/streaming_http_spec.rb +++ /dev/null @@ -1,263 +0,0 @@ -require "spec_helper" -require "socketry" -require "sse_client/sse_shared" - -# -# End-to-end tests of HTTP requests against a real server -# -describe SSE::StreamingHTTPConnection do - subject { SSE::StreamingHTTPConnection } - - def with_connection(cxn) - begin - yield cxn - ensure - cxn.close - end - end - - it "makes HTTP connection and sends request" do - with_server do |server| - requests = Queue.new - server.setup_response("/foo") do |req,res| - requests << req - res.status = 200 - end - headers = { - "Accept" => "text/plain" - } - with_connection(subject.new(server.base_uri.merge("/foo?bar"), nil, headers, 30, 30)) do - received_req = requests.pop - expect(received_req.unparsed_uri).to eq("/foo?bar") - expect(received_req.header).to eq({ - "accept" => ["text/plain"], - "host" => [server.base_uri.host] - }) - end - end - end - - it "receives response status" do - with_server do |server| - server.setup_response("/foo") do |req,res| - res.status = 204 - end - with_connection(subject.new(server.base_uri.merge("/foo"), nil, {}, 30, 30)) do |cxn| - expect(cxn.status).to eq(204) - end - end - end - - it "receives response headers" do - with_server do |server| - server.setup_response("/foo") do |req,res| - res["Content-Type"] = "application/json" - end - with_connection(subject.new(server.base_uri.merge("/foo"), nil, {}, 30, 30)) do |cxn| - expect(cxn.headers["content-type"]).to eq("application/json") - end - end - end - - it "can read response as lines" do - body = <<-EOT -This is -a response -EOT - with_server do |server| - server.setup_response("/foo") do |req,res| - res.body = body - end - with_connection(subject.new(server.base_uri.merge("/foo"), nil, {}, 30, 30)) do |cxn| - lines = cxn.read_lines - expect(lines.next).to eq("This is\n") - expect(lines.next).to eq("a response\n") - end - end - end - - it "can read entire response body" do - body = <<-EOT -This is -a response -EOT - with_server do |server| - server.setup_response("/foo") do |req,res| - res.body = body - end 
- with_connection(subject.new(server.base_uri.merge("/foo"), nil, {}, 30, 30)) do |cxn| - read_body = cxn.read_all - expect(read_body).to eq("This is\na response\n") - end - end - end - - it "enforces read timeout" do - with_server do |server| - server.setup_response("/") do |req,res| - sleep(2) - res.status = 200 - end - expect { subject.new(server.base_uri, nil, {}, 30, 0.25) }.to raise_error(Socketry::TimeoutError) - end - end - - it "connects to HTTP server through proxy" do - body = "hi" - with_server do |server| - server.setup_response("/") do |req,res| - res.body = body - end - with_server(StubProxyServer.new) do |proxy| - with_connection(subject.new(server.base_uri, proxy.base_uri, {}, 30, 30)) do |cxn| - read_body = cxn.read_all - expect(read_body).to eq("hi") - expect(proxy.request_count).to eq(1) - end - end - end - end - - it "throws error if proxy responds with error status" do - with_server do |server| - server.setup_response("/") do |req,res| - res.body = body - end - with_server(StubProxyServer.new) do |proxy| - proxy.connect_status = 403 - expect { subject.new(server.base_uri, proxy.base_uri, {}, 30, 30) }.to raise_error(SSE::ProxyError) - end - end - end - - # The following 2 tests were originally written to connect to an embedded HTTPS server made with - # WEBrick. Unfortunately, some unknown problem prevents WEBrick's self-signed certificate feature - # from working in JRuby 9.1 (but not in any other Ruby version). Therefore these tests currently - # hit an external URL. - - it "connects to HTTPS server" do - with_connection(subject.new(URI("https://app.launchdarkly.com"), nil, {}, 30, 30)) do |cxn| - expect(cxn.status).to eq 200 - end - end - - it "connects to HTTPS server through proxy" do - with_server(StubProxyServer.new) do |proxy| - with_connection(subject.new(URI("https://app.launchdarkly.com"), proxy.base_uri, {}, 30, 30)) do |cxn| - expect(cxn.status).to eq 200 - expect(proxy.request_count).to eq(1) - end - end - end -end - -# -# Tests of response parsing functionality without a real HTTP request -# -describe SSE::HTTPResponseReader do - subject { SSE::HTTPResponseReader } - - let(:simple_response) { <<-EOT -HTTP/1.1 200 OK -Cache-Control: no-cache -Content-Type: text/event-stream - -line1\r -line2 -\r -EOT - } - - def make_chunks(str) - # arbitrarily split content into 5-character blocks - str.scan(/.{1,5}/m).to_enum - end - - def mock_socket_without_timeout(chunks) - mock_socket(chunks) { :eof } - end - - def mock_socket_with_timeout(chunks) - mock_socket(chunks) { raise Socketry::TimeoutError } - end - - def mock_socket(chunks) - sock = double - allow(sock).to receive(:readpartial) do - begin - chunks.next - rescue StopIteration - yield - end - end - sock - end - - it "parses status code" do - socket = mock_socket_without_timeout(make_chunks(simple_response)) - reader = subject.new(socket, 0) - expect(reader.status).to eq(200) - end - - it "parses headers" do - socket = mock_socket_without_timeout(make_chunks(simple_response)) - reader = subject.new(socket, 0) - expect(reader.headers).to eq({ - 'cache-control' => 'no-cache', - 'content-type' => 'text/event-stream' - }) - end - - it "can read entire response body" do - socket = mock_socket_without_timeout(make_chunks(simple_response)) - reader = subject.new(socket, 0) - expect(reader.read_all).to eq("line1\r\nline2\n\r\n") - end - - it "can read response body as lines" do - socket = mock_socket_without_timeout(make_chunks(simple_response)) - reader = subject.new(socket, 0) - expect(reader.read_lines.to_a).to 
eq([ - "line1\r\n", - "line2\n", - "\r\n" - ]) - end - - it "handles chunked encoding" do - chunked_response = <<-EOT -HTTP/1.1 200 OK -Content-Type: text/plain -Transfer-Encoding: chunked - -6\r -things\r -A\r - and stuff\r -0\r -\r -EOT - socket = mock_socket_without_timeout(make_chunks(chunked_response)) - reader = subject.new(socket, 0) - expect(reader.read_all).to eq("things and stuff") - end - - it "raises error if response ends without complete headers" do - malformed_response = <<-EOT -HTTP/1.1 200 OK -Cache-Control: no-cache -EOT - socket = mock_socket_without_timeout(make_chunks(malformed_response)) - expect { subject.new(socket, 0) }.to raise_error(EOFError) - end - - it "throws timeout if thrown by socket read" do - socket = mock_socket_with_timeout(make_chunks(simple_response)) - reader = subject.new(socket, 0) - lines = reader.read_lines - lines.next - lines.next - lines.next - expect { lines.next }.to raise_error(Socketry::TimeoutError) - end -end diff --git a/spec/stream_spec.rb b/spec/stream_spec.rb index df27e173..0ab9d3ec 100644 --- a/spec/stream_spec.rb +++ b/spec/stream_spec.rb @@ -1,5 +1,5 @@ +require "ld-eventsource" require "spec_helper" -require 'ostruct' describe LaunchDarkly::StreamProcessor do subject { LaunchDarkly::StreamProcessor } @@ -8,52 +8,52 @@ let(:processor) { subject.new("sdk_key", config, requestor) } describe '#process_message' do - let(:put_message) { OpenStruct.new({data: '{"data":{"flags":{"asdf": {"key": "asdf"}},"segments":{"segkey": {"key": "segkey"}}}}'}) } - let(:patch_flag_message) { OpenStruct.new({data: '{"path": "/flags/key", "data": {"key": "asdf", "version": 1}}'}) } - let(:patch_seg_message) { OpenStruct.new({data: '{"path": "/segments/key", "data": {"key": "asdf", "version": 1}}'}) } - let(:delete_flag_message) { OpenStruct.new({data: '{"path": "/flags/key", "version": 2}'}) } - let(:delete_seg_message) { OpenStruct.new({data: '{"path": "/segments/key", "version": 2}'}) } - let(:indirect_patch_flag_message) { OpenStruct.new({data: "/flags/key"}) } - let(:indirect_patch_segment_message) { OpenStruct.new({data: "/segments/key"}) } + let(:put_message) { LaunchDarklySSE::StreamEvent.new(type: :put, data: '{"data":{"flags":{"asdf": {"key": "asdf"}},"segments":{"segkey": {"key": "segkey"}}}}') } + let(:patch_flag_message) { LaunchDarklySSE::StreamEvent.new(type: :patch, data: '{"path": "/flags/key", "data": {"key": "asdf", "version": 1}}') } + let(:patch_seg_message) { LaunchDarklySSE::StreamEvent.new(type: :patch, data: '{"path": "/segments/key", "data": {"key": "asdf", "version": 1}}') } + let(:delete_flag_message) { LaunchDarklySSE::StreamEvent.new(type: :delete, data: '{"path": "/flags/key", "version": 2}') } + let(:delete_seg_message) { LaunchDarklySSE::StreamEvent.new(type: :delete, data: '{"path": "/segments/key", "version": 2}') } + let(:indirect_patch_flag_message) { LaunchDarklySSE::StreamEvent.new(type: :'indirect/put', data: "/flags/key") } + let(:indirect_patch_segment_message) { LaunchDarklySSE::StreamEvent.new(type: :'indirect/patch', data: "/segments/key") } it "will accept PUT methods" do - processor.send(:process_message, put_message, LaunchDarkly::PUT) + processor.send(:process_message, put_message) expect(config.feature_store.get(LaunchDarkly::FEATURES, "asdf")).to eq(key: "asdf") expect(config.feature_store.get(LaunchDarkly::SEGMENTS, "segkey")).to eq(key: "segkey") end it "will accept PATCH methods for flags" do - processor.send(:process_message, patch_flag_message, LaunchDarkly::PATCH) + 
processor.send(:process_message, patch_flag_message) expect(config.feature_store.get(LaunchDarkly::FEATURES, "asdf")).to eq(key: "asdf", version: 1) end it "will accept PATCH methods for segments" do - processor.send(:process_message, patch_seg_message, LaunchDarkly::PATCH) + processor.send(:process_message, patch_seg_message) expect(config.feature_store.get(LaunchDarkly::SEGMENTS, "asdf")).to eq(key: "asdf", version: 1) end it "will accept DELETE methods for flags" do - processor.send(:process_message, patch_flag_message, LaunchDarkly::PATCH) - processor.send(:process_message, delete_flag_message, LaunchDarkly::DELETE) + processor.send(:process_message, patch_flag_message) + processor.send(:process_message, delete_flag_message) expect(config.feature_store.get(LaunchDarkly::FEATURES, "key")).to eq(nil) end it "will accept DELETE methods for segments" do - processor.send(:process_message, patch_seg_message, LaunchDarkly::PATCH) - processor.send(:process_message, delete_seg_message, LaunchDarkly::DELETE) + processor.send(:process_message, patch_seg_message) + processor.send(:process_message, delete_seg_message) expect(config.feature_store.get(LaunchDarkly::SEGMENTS, "key")).to eq(nil) end it "will accept INDIRECT PATCH method for flags" do flag = { key: 'key', version: 1 } allow(requestor).to receive(:request_flag).with(flag[:key]).and_return(flag) - processor.send(:process_message, indirect_patch_flag_message, LaunchDarkly::INDIRECT_PATCH); + processor.send(:process_message, indirect_patch_flag_message); expect(config.feature_store.get(LaunchDarkly::FEATURES, flag[:key])).to eq(flag) end it "will accept INDIRECT PATCH method for segments" do segment = { key: 'key', version: 1 } allow(requestor).to receive(:request_segment).with(segment[:key]).and_return(segment) - processor.send(:process_message, indirect_patch_segment_message, LaunchDarkly::INDIRECT_PATCH); + processor.send(:process_message, indirect_patch_segment_message); expect(config.feature_store.get(LaunchDarkly::SEGMENTS, segment[:key])).to eq(segment) end it "will log a warning if the method is not recognized" do expect(processor.instance_variable_get(:@config).logger).to receive :warn - processor.send(:process_message, put_message, "get") + processor.send(:process_message, LaunchDarklySSE::StreamEvent.new(type: :get, data: "", id: nil)) end end end From 85674397211e249ffad3a9d8c2b9607aa32f180f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 3 Jan 2019 18:32:39 -0800 Subject: [PATCH 067/182] numerous Ruby SDK documentation fixes --- CONTRIBUTING.md | 12 --- README.md | 75 ++++++++--------- lib/ldclient-rb/config.rb | 2 +- lib/ldclient-rb/impl.rb | 5 +- .../impl/integrations/dynamodb_impl.rb | 3 - lib/ldclient-rb/in_memory_store.rb | 7 +- lib/ldclient-rb/integrations/dynamodb.rb | 20 ++++- lib/ldclient-rb/integrations/redis.rb | 11 ++- lib/ldclient-rb/interfaces.rb | 7 +- lib/ldclient-rb/ldclient.rb | 82 +++++++++++++------ lib/ldclient-rb/redis_store.rb | 2 +- lib/ldclient-rb/version.rb | 1 + scripts/gendocs.sh | 3 + 13 files changed, 140 insertions(+), 90 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 96147068..c6b8dd20 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -2,15 +2,3 @@ Contributing to LaunchDarkly SDK for Ruby ========================================= We encourage pull-requests and other contributions from the community. We've also published an [SDK contributor's guide](http://docs.launchdarkly.com/docs/sdk-contributors-guide) that provides a detailed explanation of how our SDKs work. 
- -Dependencies ------------- -[ld-em-eventsource](https://github.com/launchdarkly/em-eventsource) - - -Style ------ - -Our pull requests have [Hound CI](https://houndci.com/) set up to do style checking. -We also run [Rubocop](https://github.com/bbatsov/rubocop). - diff --git a/README.md b/README.md index 43819554..4812690f 100644 --- a/README.md +++ b/README.md @@ -15,37 +15,37 @@ This version of the LaunchDarkly SDK has a minimum Ruby version of 2.2.6, or 9.1 Quick setup ----------- -0. Install the Ruby SDK with `gem` +1. Install the Ruby SDK with `gem` -```shell + ```shell gem install ldclient-rb ``` -1. Require the LaunchDarkly client: +2. Require the LaunchDarkly client: -```ruby + ```ruby require 'ldclient-rb' ``` -2. Create a new LDClient with your SDK key: +3. Create a new LDClient with your SDK key: -```ruby + ```ruby client = LaunchDarkly::LDClient.new("your_sdk_key") ``` ### Ruby on Rails -0. Add `gem 'ldclient-rb'` to your Gemfile and `bundle install` +1. Add `gem 'ldclient-rb'` to your Gemfile and `bundle install` -1. Initialize the launchdarkly client in `config/initializers/launchdarkly.rb`: +2. Initialize the launchdarkly client in `config/initializers/launchdarkly.rb`: -```ruby + ```ruby Rails.configuration.ld_client = LaunchDarkly::LDClient.new("your_sdk_key") ``` -2. You may want to include a function in your ApplicationController +3. You may want to include a function in your ApplicationController -```ruby + ```ruby def launchdarkly_settings if current_user.present? { @@ -72,31 +72,44 @@ Rails.configuration.ld_client = LaunchDarkly::LDClient.new("your_sdk_key") end ``` -3. In your controllers, access the client using +4. In your controllers, access the client using -```ruby + ```ruby Rails.application.config.ld_client.variation('your.flag.key', launchdarkly_settings, false) ``` Note that this gem will automatically switch to using the Rails logger it is detected. +Your first feature flag +----------------------- + +1. Create a new feature flag on your [dashboard](https://app.launchdarkly.com). +2. In your application code, use the feature's key to check whether the flag is on for each user: + +```ruby +if client.variation("your.flag.key", {key: "user@test.com"}, false) + # application code to show the feature +else + # the code to run if the feature is off +end +``` + HTTPS proxy ------------- -The Ruby SDK uses Faraday and Socketry to handle its network traffic. Both of these provide built-in support for the use of an HTTPS proxy. If the HTTPS_PROXY environment variable is present then the SDK will proxy all network requests through the URL provided. +----------- + +The Ruby SDK uses Faraday and Socketry to handle its network traffic. Both of these provide built-in support for the use of an HTTPS proxy. If the HTTPS_PROXY environment variable is present then the SDK will proxy all network requests through the URL provided. (HTTP_PROXY is not used because all LaunchDarkly services require HTTPS.) How to set the HTTPS_PROXY environment variable on Mac/Linux systems: ``` export HTTPS_PROXY=https://web-proxy.domain.com:8080 ``` - How to set the HTTPS_PROXY environment variable on Windows systems: ``` set HTTPS_PROXY=https://web-proxy.domain.com:8080 ``` - If your proxy requires authentication then you can prefix the URN with your login information: ``` export HTTPS_PROXY=http://user:pass@web-proxy.domain.com:8080 @@ -106,34 +119,22 @@ or set HTTPS_PROXY=http://user:pass@web-proxy.domain.com:8080 ``` - -Your first feature flag ------------------------ - -1. 
Create a new feature flag on your [dashboard](https://app.launchdarkly.com) -2. In your application code, use the feature's key to check whether the flag is on for each user: - -```ruby -if client.variation("your.flag.key", {key: "user@test.com"}, false) - # application code to show the feature -else - # the code to run if the feature is off -end -``` - Database integrations --------------------- -Feature flag data can be kept in a persistent store using Redis or DynamoDB. These adapters are implemented in the `LaunchDarkly::Integrations::Redis` and `LaunchDarkly::Integrations::DynamoDB` modules; to use them, call the `new_feature_store` method in the module, and put the returned object in the `feature_store` property of your client configuration. See the [source code](https://github.com/launchdarkly/ruby-client-private/tree/master/lib/ldclient-rb/integrations) and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store) for more information. +Feature flag data can be kept in a persistent store using Redis or DynamoDB. These adapters are implemented in the `LaunchDarkly::Integrations::Redis` and `LaunchDarkly::Integrations::DynamoDB` modules; to use them, call the `new_feature_store` method in the module, and put the returned object in the `feature_store` property of your client configuration. See the `LaunchDarkly::Integrations` module and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store) for more information. Using flag data from a file --------------------------- -For testing purposes, the SDK can be made to read feature flag state from a file or files instead of connecting to LaunchDarkly. See [`file_data_source.rb`](https://github.com/launchdarkly/ruby-client/blob/master/lib/ldclient-rb/file_data_source.rb) for more details. + +For testing purposes, the SDK can be made to read feature flag state from a file or files instead of connecting to LaunchDarkly. See `LaunchDarkly::FileDataSource` or the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/reading-flags-from-a-file) for more details. Learn more ----------- -Check out our [documentation](http://docs.launchdarkly.com) for in-depth instructions on configuring and using LaunchDarkly. You can also head straight to the [complete reference guide for this SDK](http://docs.launchdarkly.com/docs/ruby-sdk-reference). +Check out our [documentation](http://docs.launchdarkly.com) for in-depth instructions on configuring and using LaunchDarkly. You can also head straight to the [reference guide for this SDK](http://docs.launchdarkly.com/docs/ruby-sdk-reference). + +Generated API documentation is on [RubyDoc.info](https://www.rubydoc.info/gems/ldclient-rb). Testing ------- @@ -143,10 +144,10 @@ We run integration tests for all our SDKs using a centralized test harness. This Contributing ------------ -See [Contributing](https://github.com/launchdarkly/ruby-client/blob/master/CONTRIBUTING.md) +See [Contributing](https://github.com/launchdarkly/ruby-client/blob/master/CONTRIBUTING.md). About LaunchDarkly ------------ +------------------ * LaunchDarkly is a continuous delivery platform that provides feature flags as a service and allows developers to iterate quickly and safely. We allow you to easily flag your features and manage them from the LaunchDarkly dashboard. 
With LaunchDarkly, you can: * Roll out a new feature to a subset of your users (like a group of users who opt-in to a beta tester group), gathering feedback and bug reports from real-world use cases. diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index 64ad7378..34f4f67b 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -346,7 +346,7 @@ def self.default_proxy # # The default value for {#logger}. - # @return [::Logger] the Rails logger if in Rails, or a default [::Logger] at WARN level otherwise + # @return [Logger] the Rails logger if in Rails, or a default Logger at WARN level otherwise # def self.default_logger if defined?(Rails) && Rails.respond_to?(:logger) diff --git a/lib/ldclient-rb/impl.rb b/lib/ldclient-rb/impl.rb index 3df0d7e3..b0d63ebe 100644 --- a/lib/ldclient-rb/impl.rb +++ b/lib/ldclient-rb/impl.rb @@ -1,10 +1,11 @@ module LaunchDarkly # - # Low-level implementation classes. Everything in this module should be considered non-public - # and subject to change with any release. + # Internal implementation classes. Everything in this module should be considered unsupported + # and subject to change. # # @since 5.5.0 + # @private # module Impl # code is in ldclient-rb/impl/ diff --git a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb index ebaa0445..a76fae52 100644 --- a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +++ b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb @@ -1,4 +1,3 @@ -require "concurrent/atomics" require "json" module LaunchDarkly @@ -36,8 +35,6 @@ def initialize(table_name, opts) @prefix = opts[:prefix] @logger = opts[:logger] || Config.default_logger - @stopped = Concurrent::AtomicBoolean.new(false) - if !opts[:existing_client].nil? @client = opts[:existing_client] else diff --git a/lib/ldclient-rb/in_memory_store.rb b/lib/ldclient-rb/in_memory_store.rb index 4814c85d..f2843c1e 100644 --- a/lib/ldclient-rb/in_memory_store.rb +++ b/lib/ldclient-rb/in_memory_store.rb @@ -6,18 +6,21 @@ module LaunchDarkly # we add another storable data type in the future, as long as it follows the same pattern # (having "key", "version", and "deleted" properties), we only need to add a corresponding # constant here and the existing store should be able to handle it. + + # @private FEATURES = { namespace: "features" }.freeze + # @private SEGMENTS = { namespace: "segments" }.freeze # # Default implementation of the LaunchDarkly client's feature store, using an in-memory - # cache. This object holds feature flags and related data received from the - # streaming API. + # cache. This object holds feature flags and related data received from LaunchDarkly. + # Database-backed implementations are available in {LaunchDarkly::Integrations}. # class InMemoryFeatureStore include LaunchDarkly::Interfaces::FeatureStore diff --git a/lib/ldclient-rb/integrations/dynamodb.rb b/lib/ldclient-rb/integrations/dynamodb.rb index c9ded019..ecd87fce 100644 --- a/lib/ldclient-rb/integrations/dynamodb.rb +++ b/lib/ldclient-rb/integrations/dynamodb.rb @@ -5,12 +5,30 @@ module LaunchDarkly module Integrations module DynamoDB # - # Creates a DynamoDB-backed persistent feature store. + # Creates a DynamoDB-backed persistent feature store. For more details about how and why you can + # use a persistent feature store, see the + # [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). 
# # To use this method, you must first install one of the AWS SDK gems: either `aws-sdk-dynamodb`, or # the full `aws-sdk`. Then, put the object returned by this method into the `feature_store` property # of your client configuration ({LaunchDarkly::Config}). # + # @example Configuring the feature store + # store = LaunchDarkly::Integrations::DynamoDB::new_feature_store("my-table-name") + # config = LaunchDarkly::Config.new(feature_store: store) + # client = LaunchDarkly::LDClient.new(my_sdk_key, config) + # + # Note that the specified table must already exist in DynamoDB. It must have a partition key called + # "namespace", and a sort key called "key" (both strings). The SDK does not create the table + # automatically because it has no way of knowing what additional properties (such as permissions + # and throughput) you would want it to have. + # + # By default, the DynamoDB client will try to get your AWS credentials and region name from + # environment variables and/or local configuration files, as described in the AWS SDK documentation. + # You can also specify any supported AWS SDK options in `dynamodb_opts`-- or, provide an + # already-configured DynamoDB client in `existing_client`. + # + # @param table_name [String] name of an existing DynamoDB table # @param opts [Hash] the configuration options # @option opts [Hash] :dynamodb_opts options to pass to the DynamoDB client constructor (ignored if you specify `:existing_client`) # @option opts [Object] :existing_client an already-constructed DynamoDB client for the feature store to use diff --git a/lib/ldclient-rb/integrations/redis.rb b/lib/ldclient-rb/integrations/redis.rb index b81097c6..34509181 100644 --- a/lib/ldclient-rb/integrations/redis.rb +++ b/lib/ldclient-rb/integrations/redis.rb @@ -23,11 +23,18 @@ def self.default_prefix end # - # Creates a Redis-backed persistent feature store. + # Creates a Redis-backed persistent feature store. For more details about how and why you can + # use a persistent feature store, see the + # [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). # # To use this method, you must first have the `redis` and `connection-pool` gems installed. Then, # put the object returned by this method into the `feature_store` property of your - # client configuration ({LaunchDarkly::Config}). + # client configuration. + # + # @example Configuring the feature store + # store = LaunchDarkly::Integrations::Redis::new_feature_store(redis_url: "redis://my-server") + # config = LaunchDarkly::Config.new(feature_store: store) + # client = LaunchDarkly::LDClient.new(my_sdk_key, config) # # @param opts [Hash] the configuration options # @option opts [String] :redis_url (default_redis_url) URL of the Redis instance (shortcut for omitting `redis_opts`) diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb index 912472b5..094ce0dd 100644 --- a/lib/ldclient-rb/interfaces.rb +++ b/lib/ldclient-rb/interfaces.rb @@ -17,9 +17,10 @@ module Interfaces # `:deleted`, a boolean (optional, defaults to false) that if true means this is a # placeholder for a deleted entity. # - # Examples of a "kind" are feature flags and segments; each of these is associated with an - # object such as {LaunchDarkly::FEATURES} and {LaunchDarkly::SEGMENTS}. The "kind" objects are - # hashes with a single property, `:namespace`, which is a short string unique to that kind. 
+ # To represent the different kinds of objects that can be stored, such as feature flags and + # segments, the SDK will provide a "kind" object; this is a hash with a single property, + # `:namespace`, which is a short string unique to that kind. This string can be used as a + # collection name or a key prefix. # # The default implementation is {LaunchDarkly::InMemoryFeatureStore}. Several implementations # that use databases can be found in {LaunchDarkly::Integrations}. If you want to write a new diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 868c65bd..5788d276 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -17,10 +17,17 @@ class LDClient # configuration parameter can also supplied to specify advanced options, # but for most use cases, the default configuration is appropriate. # + # The client will immediately attempt to connect to LaunchDarkly and retrieve + # your feature flag data. If it cannot successfully do so within the time limit + # specified by `wait_for_sec`, the constructor will return a client that is in + # an uninitialized state. See {#initialized?} for more details. + # # @param sdk_key [String] the SDK key for your LaunchDarkly account # @param config [Config] an optional client configuration object + # @param wait_for_sec [Float] maximum time (in seconds) to wait for initialization # # @return [LDClient] The LaunchDarkly client instance + # def initialize(sdk_key, config = Config.default, wait_for_sec = 5) @sdk_key = sdk_key @config = config @@ -85,7 +92,7 @@ def toggle?(key, user, default = false) # # Creates a hash string that can be used by the JavaScript SDK to identify a user. - # For more information, see ["Secure mode"](https://docs.launchdarkly.com/docs/js-sdk-reference#section-secure-mode). + # For more information, see [Secure mode](https://docs.launchdarkly.com/docs/js-sdk-reference#section-secure-mode). # # @param user [Hash] the user properties # @return [String] a hash string @@ -94,44 +101,61 @@ def secure_mode_hash(user) OpenSSL::HMAC.hexdigest("sha256", @sdk_key, user[:key].to_s) end - # Returns whether the client has been initialized and is ready to serve feature flag requests + # + # Returns whether the client has been initialized and is ready to serve feature flag requests. + # + # If this returns false, it means that the client did not succeed in connecting to + # LaunchDarkly within the time limit that you specified in the constructor. It could + # still succeed in connecting at a later time (on another thread), or it could have + # given up permanently (for instance, if your SDK key is invalid). In the meantime, + # any call to {#variation} or {#variation_detail} will behave as follows: + # + # 1. It will check whether the feature store already contains data (that is, you + # are using a database-backed store and it was populated by a previous run of this + # application). If so, it will use the last known feature flag data. + # + # 2. Failing that, it will return the value that you specified for the `default` + # parameter of {#variation} or {#variation_detail}. + # # @return [Boolean] true if the client has been initialized + # def initialized? @config.offline? || @config.use_ldd? || @data_source.initialized? end # - # Determines the variation of a feature flag to present to a user. At a minimum, - # the user hash should contain a `:key`. + # Determines the variation of a feature flag to present to a user. 
# - # @example Basic user hash - # {key: "user@example.com"} + # At a minimum, the user hash should contain a `:key`, which should be the unique + # identifier for your user (or, for an anonymous user, a session identifier or + # cookie). # - # For authenticated users, the `:key` should be the unique identifier for - # your user. For anonymous users, the `:key` should be a session identifier - # or cookie. In either case, the only requirement is that the key - # is unique to a user. + # Other supported user attributes include IP address, country code, and an arbitrary hash of + # custom attributes. For more about the supported user properties and how they work in + # LaunchDarkly, see [Targeting users](https://docs.launchdarkly.com/docs/targeting-users). + # + # The optional `:privateAttributeNames` user property allows you to specify a list of + # attribute names that should not be sent back to LaunchDarkly. + # [Private attributes](https://docs.launchdarkly.com/docs/private-user-attributes) + # can also be configured globally in {Config}. # - # You can also pass IP addresses and country codes in the user hash. + # @example Basic user hash + # {key: "my-user-id"} # # @example More complete user hash - # {key: "user@example.com", ip: "127.0.0.1", country: "US"} - # - # The user hash can contain arbitrary custom attributes stored in a `:custom` sub-hash: - # - # @example A user hash with custom attributes - # {key: "user@example.com", custom: {customer_rank: 1000, groups: ["google", "microsoft"]}} + # {key: "my-user-id", ip: "127.0.0.1", country: "US", custom: {customer_rank: 1000}} # - # Attribute values in the custom hash can be integers, booleans, strings, or - # lists of integers, booleans, or strings. + # @example User with a private attribute + # {key: "my-user-id", email: "email@example.com", privateAttributeNames: ["email"]} # # @param key [String] the unique feature key for the feature flag, as shown # on the LaunchDarkly dashboard # @param user [Hash] a hash containing parameters for the end user requesting the flag - # @param default the default value of the flag + # @param default the default value of the flag; this is used if there is an error + # condition making it impossible to find or evaluate the flag + # + # @return the variation to show the user, or the default value if there's an an error # - # @return the variation to show the user, or the - # default value if there's an an error def variation(key, user, default) evaluate_internal(key, user, default, false).value end @@ -148,10 +172,14 @@ def variation(key, user, default) # Calling `variation_detail` instead of `variation` also causes the "reason" data to # be included in analytics events, if you are capturing detailed event data for this flag. # + # For more information, see the reference guide on + # [Evaluation reasons](https://docs.launchdarkly.com/v2.0/docs/evaluation-reasons). + # # @param key [String] the unique feature key for the feature flag, as shown # on the LaunchDarkly dashboard # @param user [Hash] a hash containing parameters for the end user requesting the flag - # @param default the default value of the flag + # @param default the default value of the flag; this is used if there is an error + # condition making it impossible to find or evaluate the flag # # @return [EvaluationDetail] an object describing the result # @@ -198,9 +226,11 @@ def track(event_name, user, data) end # - # Returns all feature flag values for the given user. This method is deprecated - please use - # {#all_flags_state} instead. 
Current versions of the client-side SDK will not generate analytics - # events correctly if you pass the result of `all_flags`. + # Returns all feature flag values for the given user. + # + # @deprecated Please use {#all_flags_state} instead. Current versions of the + # client-side SDK will not generate analytics events correctly if you pass the + # result of `all_flags`. # # @param user [Hash] The end user requesting the feature flags # @return [Hash] a hash of feature flag keys to values diff --git a/lib/ldclient-rb/redis_store.rb b/lib/ldclient-rb/redis_store.rb index 32a9507d..6ab7dd96 100644 --- a/lib/ldclient-rb/redis_store.rb +++ b/lib/ldclient-rb/redis_store.rb @@ -16,7 +16,7 @@ module LaunchDarkly # of your client configuration. # # @deprecated Use the factory method in {LaunchDarkly::Integrations::Redis} instead. This specific - # implementation class may change in the future. + # implementation class may be changed or removed in the future. # class RedisFeatureStore include LaunchDarkly::Interfaces::FeatureStore diff --git a/lib/ldclient-rb/version.rb b/lib/ldclient-rb/version.rb index a70241bf..b526a871 100644 --- a/lib/ldclient-rb/version.rb +++ b/lib/ldclient-rb/version.rb @@ -1,3 +1,4 @@ module LaunchDarkly + # The current gem version. VERSION = "5.4.1" end diff --git a/scripts/gendocs.sh b/scripts/gendocs.sh index 6280355e..1e545955 100755 --- a/scripts/gendocs.sh +++ b/scripts/gendocs.sh @@ -1,5 +1,8 @@ #!/bin/bash +# Use this script to generate documentation locally in ./doc so it can be proofed before release. +# After release, documentation will be visible at https://www.rubydoc.info/gems/ldclient-rb + gem install --conservative yard gem install --conservative redcarpet # provides Markdown formatting From 63c3680ab438609184e1593309636314ded27141 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 3 Jan 2019 18:36:03 -0800 Subject: [PATCH 068/182] fix bundler version --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index f19ae7bc..4d0800f1 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -20,7 +20,7 @@ ruby-docker-template: &ruby-docker-template fi - run: ruby -v - run: gem install bundler - - run: bundle install + - run: bundle install -v 1.17.3 - run: mkdir ./rspec - run: bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec - store_test_results: @@ -88,7 +88,7 @@ jobs: if [[ $i == jruby* ]]; then gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi - gem install bundler; + gem install bundler -v 1.17.3; bundle install; mv Gemfile.lock "Gemfile.lock.$i" done From 008331b1cff9cda159dfaf7f0cb65873afeaec5c Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 3 Jan 2019 18:41:19 -0800 Subject: [PATCH 069/182] fix build --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 4d0800f1..a672a100 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -19,8 +19,8 @@ ruby-docker-template: &ruby-docker-template gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi - run: ruby -v - - run: gem install bundler - - run: bundle install -v 1.17.3 + - run: gem install bundler -v 1.17.3 + - run: bundle install - run: mkdir ./rspec - run: bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec - store_test_results: From c09c166ccd78055cc2dcb7778cc4779d97350796 Mon Sep 17 
00:00:00 2001 From: Eli Bishop Date: Fri, 4 Jan 2019 12:23:48 -0800 Subject: [PATCH 070/182] make some dependencies less strict and remove some unused ones --- ldclient-rb.gemspec | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/ldclient-rb.gemspec b/ldclient-rb.gemspec index 0b8f4f9d..46dac190 100644 --- a/ldclient-rb.gemspec +++ b/ldclient-rb.gemspec @@ -34,11 +34,9 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "json", [">= 1.8", "< 3"] spec.add_runtime_dependency "faraday", [">= 0.9", "< 2"] spec.add_runtime_dependency "faraday-http-cache", [">= 1.3.0", "< 3"] - spec.add_runtime_dependency "semantic", "~> 1.6.0" - spec.add_runtime_dependency "thread_safe", "~> 0.3" + spec.add_runtime_dependency "semantic", "~> 1.6" spec.add_runtime_dependency "net-http-persistent", "~> 2.9" - spec.add_runtime_dependency "concurrent-ruby", "~> 1.0.4" - spec.add_runtime_dependency "hashdiff", "~> 0.2" + spec.add_runtime_dependency "concurrent-ruby", "~> 1.0" spec.add_runtime_dependency "http_tools", '~> 0.4.5' spec.add_runtime_dependency "socketry", "~> 0.5.1" end From a4ced95117f3b47b14d2048fa5e7deb1088becbd Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 4 Jan 2019 12:32:25 -0800 Subject: [PATCH 071/182] not using thread_safe --- lib/ldclient-rb/cache_store.rb | 12 ++++++------ lib/ldclient-rb/redis_store.rb | 1 - 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/lib/ldclient-rb/cache_store.rb b/lib/ldclient-rb/cache_store.rb index 0677da65..164534fb 100644 --- a/lib/ldclient-rb/cache_store.rb +++ b/lib/ldclient-rb/cache_store.rb @@ -1,12 +1,12 @@ -require "thread_safe" +require "concurrent/map" module LaunchDarkly - # A thread-safe in-memory store suitable for use - # with the Faraday caching HTTP client. Uses the - # Threadsafe gem as the underlying cache. + # + # A thread-safe in-memory store suitable for use with the Faraday caching HTTP client. Uses the + # concurrent-ruby gem's Map as the underlying cache. 
# # @see https://github.com/plataformatec/faraday-http-cache - # @see https://github.com/ruby-concurrency/thread_safe + # @see https://github.com/ruby-concurrency # class ThreadSafeMemoryStore # @@ -14,7 +14,7 @@ class ThreadSafeMemoryStore # # @return [ThreadSafeMemoryStore] a new store def initialize - @cache = ThreadSafe::Cache.new + @cache = Concurrent::Map.new end # diff --git a/lib/ldclient-rb/redis_store.rb b/lib/ldclient-rb/redis_store.rb index 3729ca6b..c9b1bc64 100644 --- a/lib/ldclient-rb/redis_store.rb +++ b/lib/ldclient-rb/redis_store.rb @@ -1,6 +1,5 @@ require "concurrent/atomics" require "json" -require "thread_safe" module LaunchDarkly # From 806bb8e8fb7b665eb2ac68df583fe186d9cf9ca7 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 4 Jan 2019 12:34:01 -0800 Subject: [PATCH 072/182] add bundler version (still not sure why we need to) --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index df9dac51..544bd9ae 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -19,7 +19,7 @@ ruby-docker-template: &ruby-docker-template gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi - run: ruby -v - - run: gem install bundler + - run: gem install bundler -v 1.17.3 - run: bundle install - run: mkdir ./rspec - run: bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec @@ -80,7 +80,7 @@ jobs: if [[ $i == jruby* ]]; then gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi - gem install bundler; + gem install bundler -v 1.17.3; bundle install; mv Gemfile.lock "Gemfile.lock.$i" done From 9d446c85cd15f7375886f922d455de6cef8c8062 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 4 Jan 2019 13:02:03 -0800 Subject: [PATCH 073/182] don't need bundler version for all rubies --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 544bd9ae..d742e552 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -80,7 +80,7 @@ jobs: if [[ $i == jruby* ]]; then gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi - gem install bundler -v 1.17.3; + gem install bundler; bundle install; mv Gemfile.lock "Gemfile.lock.$i" done From 5516745a0c16d84d2b2420b3e7b84f37f1353f5e Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 4 Jan 2019 13:13:59 -0800 Subject: [PATCH 074/182] fix bundler version again --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index d742e552..85f6f7cf 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -19,7 +19,7 @@ ruby-docker-template: &ruby-docker-template gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi - run: ruby -v - - run: gem install bundler -v 1.17.3 + - run: gem install bundler -v "~> 1.7" - run: bundle install - run: mkdir ./rspec - run: bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec @@ -80,7 +80,7 @@ jobs: if [[ $i == jruby* ]]; then gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi - gem install bundler; + gem install bundler -v "~> 1.7"; bundle install; mv Gemfile.lock "Gemfile.lock.$i" done From 54add1dcc64525b22a0e558eb3024e7b60adcf41 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 4 Jan 2019 15:52:35 -0800 Subject: [PATCH 075/182] try to fix bundler version again --- 
.circleci/config.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 85f6f7cf..5a66f0ec 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -80,6 +80,7 @@ jobs: if [[ $i == jruby* ]]; then gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi + gem uninstall bundler; # a later, incompatible version of bundler might be preinstalled gem install bundler -v "~> 1.7"; bundle install; mv Gemfile.lock "Gemfile.lock.$i" From 3d4b08067de23b9fa77d061f419b788eb7bd1405 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 4 Jan 2019 16:01:27 -0800 Subject: [PATCH 076/182] yet another build fix --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 5a66f0ec..d08d8c0c 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -80,7 +80,7 @@ jobs: if [[ $i == jruby* ]]; then gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi - gem uninstall bundler; # a later, incompatible version of bundler might be preinstalled + yes | gem uninstall bundler; # a later, incompatible version of bundler might be preinstalled gem install bundler -v "~> 1.7"; bundle install; mv Gemfile.lock "Gemfile.lock.$i" From 635adf44c4bc9635111535f49ce16a1dd079d059 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 4 Jan 2019 16:35:25 -0800 Subject: [PATCH 077/182] commit lock file to get correct bundler --- .circleci/config.yml | 1 - .gitignore | 1 - Gemfile.lock | 111 +++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 111 insertions(+), 2 deletions(-) create mode 100644 Gemfile.lock diff --git a/.circleci/config.yml b/.circleci/config.yml index d08d8c0c..85f6f7cf 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -80,7 +80,6 @@ jobs: if [[ $i == jruby* ]]; then gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi - yes | gem uninstall bundler; # a later, incompatible version of bundler might be preinstalled gem install bundler -v "~> 1.7"; bundle install; mv Gemfile.lock "Gemfile.lock.$i" diff --git a/.gitignore b/.gitignore index bb576123..3f9d02f2 100644 --- a/.gitignore +++ b/.gitignore @@ -12,5 +12,4 @@ *.a mkmf.log *.gem -Gemfile.lock .DS_Store diff --git a/Gemfile.lock b/Gemfile.lock new file mode 100644 index 00000000..17c5725e --- /dev/null +++ b/Gemfile.lock @@ -0,0 +1,111 @@ +PATH + remote: . 
+ specs: + ldclient-rb (5.4.1) + concurrent-ruby (~> 1.0.4) + faraday (>= 0.9, < 2) + faraday-http-cache (>= 1.3.0, < 3) + hashdiff (~> 0.2) + http_tools (~> 0.4.5) + json (>= 1.8, < 3) + net-http-persistent (~> 2.9) + semantic (~> 1.6.0) + socketry (~> 0.5.1) + thread_safe (~> 0.3) + +GEM + remote: https://rubygems.org/ + specs: + aws-eventstream (1.0.1) + aws-partitions (1.125.0) + aws-sdk-core (3.44.0) + aws-eventstream (~> 1.0) + aws-partitions (~> 1.0) + aws-sigv4 (~> 1.0) + jmespath (~> 1.0) + aws-sdk-dynamodb (1.18.0) + aws-sdk-core (~> 3, >= 3.39.0) + aws-sigv4 (~> 1.0) + aws-sigv4 (1.0.3) + codeclimate-test-reporter (0.6.0) + simplecov (>= 0.7.1, < 1.0.0) + concurrent-ruby (1.0.5) + concurrent-ruby (1.0.5-java) + connection_pool (2.2.1) + diff-lcs (1.3) + diplomat (2.0.2) + faraday (~> 0.9) + json + docile (1.1.5) + faraday (0.15.4) + multipart-post (>= 1.2, < 3) + faraday-http-cache (2.0.0) + faraday (~> 0.8) + ffi (1.9.25) + ffi (1.9.25-java) + hashdiff (0.3.7) + hitimes (1.3.0) + hitimes (1.3.0-java) + http_tools (0.4.5) + jmespath (1.4.0) + json (1.8.6) + json (1.8.6-java) + listen (3.1.5) + rb-fsevent (~> 0.9, >= 0.9.4) + rb-inotify (~> 0.9, >= 0.9.7) + ruby_dep (~> 1.2) + multipart-post (2.0.0) + net-http-persistent (2.9.4) + rake (10.5.0) + rb-fsevent (0.10.3) + rb-inotify (0.9.10) + ffi (>= 0.5.0, < 2) + redis (3.3.5) + rspec (3.7.0) + rspec-core (~> 3.7.0) + rspec-expectations (~> 3.7.0) + rspec-mocks (~> 3.7.0) + rspec-core (3.7.1) + rspec-support (~> 3.7.0) + rspec-expectations (3.7.0) + diff-lcs (>= 1.2.0, < 2.0) + rspec-support (~> 3.7.0) + rspec-mocks (3.7.0) + diff-lcs (>= 1.2.0, < 2.0) + rspec-support (~> 3.7.0) + rspec-support (3.7.0) + rspec_junit_formatter (0.3.0) + rspec-core (>= 2, < 4, != 2.12.0) + ruby_dep (1.5.0) + semantic (1.6.1) + simplecov (0.15.1) + docile (~> 1.1.0) + json (>= 1.8, < 3) + simplecov-html (~> 0.10.0) + simplecov-html (0.10.2) + socketry (0.5.1) + hitimes (~> 1.2) + thread_safe (0.3.6) + thread_safe (0.3.6-java) + timecop (0.9.1) + +PLATFORMS + java + ruby + +DEPENDENCIES + aws-sdk-dynamodb (~> 1.18) + bundler (~> 1.7) + codeclimate-test-reporter (~> 0) + connection_pool (>= 2.1.2) + diplomat (>= 2.0.2) + ldclient-rb! + listen (~> 3.0) + rake (~> 10.0) + redis (~> 3.3.5) + rspec (~> 3.2) + rspec_junit_formatter (~> 0.3.0) + timecop (~> 0.9.1) + +BUNDLED WITH + 1.17.1 From 3b5b08e2f61243f28748c59f6722ac1a914481c8 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 4 Jan 2019 16:42:24 -0800 Subject: [PATCH 078/182] update lockfile --- Gemfile.lock | 31 ++++--------------------------- 1 file changed, 4 insertions(+), 27 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index 17c5725e..6c4673e4 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -2,40 +2,23 @@ PATH remote: . 
specs: ldclient-rb (5.4.1) - concurrent-ruby (~> 1.0.4) + concurrent-ruby (~> 1.0) faraday (>= 0.9, < 2) faraday-http-cache (>= 1.3.0, < 3) - hashdiff (~> 0.2) http_tools (~> 0.4.5) json (>= 1.8, < 3) net-http-persistent (~> 2.9) - semantic (~> 1.6.0) + semantic (~> 1.6) socketry (~> 0.5.1) - thread_safe (~> 0.3) GEM remote: https://rubygems.org/ specs: - aws-eventstream (1.0.1) - aws-partitions (1.125.0) - aws-sdk-core (3.44.0) - aws-eventstream (~> 1.0) - aws-partitions (~> 1.0) - aws-sigv4 (~> 1.0) - jmespath (~> 1.0) - aws-sdk-dynamodb (1.18.0) - aws-sdk-core (~> 3, >= 3.39.0) - aws-sigv4 (~> 1.0) - aws-sigv4 (1.0.3) codeclimate-test-reporter (0.6.0) simplecov (>= 0.7.1, < 1.0.0) - concurrent-ruby (1.0.5) - concurrent-ruby (1.0.5-java) + concurrent-ruby (1.1.4) connection_pool (2.2.1) diff-lcs (1.3) - diplomat (2.0.2) - faraday (~> 0.9) - json docile (1.1.5) faraday (0.15.4) multipart-post (>= 1.2, < 3) @@ -43,11 +26,9 @@ GEM faraday (~> 0.8) ffi (1.9.25) ffi (1.9.25-java) - hashdiff (0.3.7) hitimes (1.3.0) hitimes (1.3.0-java) http_tools (0.4.5) - jmespath (1.4.0) json (1.8.6) json (1.8.6-java) listen (3.1.5) @@ -85,8 +66,6 @@ GEM simplecov-html (0.10.2) socketry (0.5.1) hitimes (~> 1.2) - thread_safe (0.3.6) - thread_safe (0.3.6-java) timecop (0.9.1) PLATFORMS @@ -94,11 +73,9 @@ PLATFORMS ruby DEPENDENCIES - aws-sdk-dynamodb (~> 1.18) bundler (~> 1.7) codeclimate-test-reporter (~> 0) connection_pool (>= 2.1.2) - diplomat (>= 2.0.2) ldclient-rb! listen (~> 3.0) rake (~> 10.0) @@ -108,4 +85,4 @@ DEPENDENCIES timecop (~> 0.9.1) BUNDLED WITH - 1.17.1 + 1.17.3 From 8656f258d42eb16c612181eb01a6b5e6ebebf225 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sun, 6 Jan 2019 12:52:42 -0800 Subject: [PATCH 079/182] use ruby-eventsource --- Gemfile.lock | 7 +++++-- lib/ldclient-rb/stream.rb | 4 ++-- spec/stream_spec.rb | 16 ++++++++-------- 3 files changed, 15 insertions(+), 12 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index 695aaadc..f376fb32 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -5,11 +5,10 @@ PATH concurrent-ruby (~> 1.0) faraday (>= 0.9, < 2) faraday-http-cache (>= 1.3.0, < 3) - http_tools (~> 0.4.5) json (>= 1.8, < 3) + ld-eventsource (~> 1.0) net-http-persistent (~> 2.9) semantic (~> 1.6) - socketry (~> 0.5.1) GEM remote: https://rubygems.org/ @@ -43,6 +42,10 @@ GEM jmespath (1.4.0) json (1.8.6) json (1.8.6-java) + ld-eventsource (1.0.0) + concurrent-ruby (~> 1.0) + http_tools (~> 0.4.5) + socketry (~> 0.5.1) listen (3.1.5) rb-fsevent (~> 0.9, >= 0.9.4) rb-inotify (~> 0.9, >= 0.9.7) diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index adc4bf59..e4f1b3bd 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -54,11 +54,11 @@ def start read_timeout: READ_TIMEOUT_SECONDS, logger: @config.logger } - @es = LaunchDarklySSE::SSEClient.new(@config.stream_uri + "/all", **opts) do |conn| + @es = SSE::Client.new(@config.stream_uri + "/all", **opts) do |conn| conn.on_event { |event| process_message(event) } conn.on_error { |err| case err - when LaunchDarklySSE::HTTPError + when SSE::Errors::HTTPError status = err.status message = Util.http_error_message(status, "streaming connection", "will retry") @config.logger.error { "[LDClient] #{message}" } diff --git a/spec/stream_spec.rb b/spec/stream_spec.rb index 0ab9d3ec..648833ff 100644 --- a/spec/stream_spec.rb +++ b/spec/stream_spec.rb @@ -8,13 +8,13 @@ let(:processor) { subject.new("sdk_key", config, requestor) } describe '#process_message' do - let(:put_message) { 
LaunchDarklySSE::StreamEvent.new(type: :put, data: '{"data":{"flags":{"asdf": {"key": "asdf"}},"segments":{"segkey": {"key": "segkey"}}}}') } - let(:patch_flag_message) { LaunchDarklySSE::StreamEvent.new(type: :patch, data: '{"path": "/flags/key", "data": {"key": "asdf", "version": 1}}') } - let(:patch_seg_message) { LaunchDarklySSE::StreamEvent.new(type: :patch, data: '{"path": "/segments/key", "data": {"key": "asdf", "version": 1}}') } - let(:delete_flag_message) { LaunchDarklySSE::StreamEvent.new(type: :delete, data: '{"path": "/flags/key", "version": 2}') } - let(:delete_seg_message) { LaunchDarklySSE::StreamEvent.new(type: :delete, data: '{"path": "/segments/key", "version": 2}') } - let(:indirect_patch_flag_message) { LaunchDarklySSE::StreamEvent.new(type: :'indirect/put', data: "/flags/key") } - let(:indirect_patch_segment_message) { LaunchDarklySSE::StreamEvent.new(type: :'indirect/patch', data: "/segments/key") } + let(:put_message) { SSE::StreamEvent.new(:put, '{"data":{"flags":{"asdf": {"key": "asdf"}},"segments":{"segkey": {"key": "segkey"}}}}') } + let(:patch_flag_message) { SSE::StreamEvent.new(:patch, '{"path": "/flags/key", "data": {"key": "asdf", "version": 1}}') } + let(:patch_seg_message) { SSE::StreamEvent.new(:patch, '{"path": "/segments/key", "data": {"key": "asdf", "version": 1}}') } + let(:delete_flag_message) { SSE::StreamEvent.new(:delete, '{"path": "/flags/key", "version": 2}') } + let(:delete_seg_message) { SSE::StreamEvent.new(:delete, '{"path": "/segments/key", "version": 2}') } + let(:indirect_patch_flag_message) { SSE::StreamEvent.new(:'indirect/patch', "/flags/key") } + let(:indirect_patch_segment_message) { SSE::StreamEvent.new(:'indirect/patch', "/segments/key") } it "will accept PUT methods" do processor.send(:process_message, put_message) @@ -53,7 +53,7 @@ end it "will log a warning if the method is not recognized" do expect(processor.instance_variable_get(:@config).logger).to receive :warn - processor.send(:process_message, LaunchDarklySSE::StreamEvent.new(type: :get, data: "", id: nil)) + processor.send(:process_message, SSE::StreamEvent.new(type: :get, data: "", id: nil)) end end end From 1ced67ef78b84e0ff74bf0b8f791de45782f1d6e Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sun, 6 Jan 2019 14:19:35 -0800 Subject: [PATCH 080/182] uninstall unwanted bundler upgrade in CI --- .circleci/config.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 0e285fcc..26dd2cb7 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -88,6 +88,8 @@ jobs: if [[ $i == jruby* ]]; then gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi + # bundler 2.0 may be preinstalled, we need to remove it if so + yes | gem uninstall bundler --version '>=2.0' || true; gem install bundler -v "~> 1.7"; bundle install; mv Gemfile.lock "Gemfile.lock.$i" From e11bf4b05cced0e29dbf24daabd08922f8b9ba84 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sun, 6 Jan 2019 14:30:19 -0800 Subject: [PATCH 081/182] allow net-http-persistent 3.x --- Gemfile.lock | 7 ++++--- ldclient-rb.gemspec | 2 +- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index 6c4673e4..28f15ccf 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,13 +1,13 @@ PATH remote: . 
specs: - ldclient-rb (5.4.1) + ldclient-rb (5.4.2) concurrent-ruby (~> 1.0) faraday (>= 0.9, < 2) faraday-http-cache (>= 1.3.0, < 3) http_tools (~> 0.4.5) json (>= 1.8, < 3) - net-http-persistent (~> 2.9) + net-http-persistent (>= 2.9, < 4.0) semantic (~> 1.6) socketry (~> 0.5.1) @@ -36,7 +36,8 @@ GEM rb-inotify (~> 0.9, >= 0.9.7) ruby_dep (~> 1.2) multipart-post (2.0.0) - net-http-persistent (2.9.4) + net-http-persistent (3.0.0) + connection_pool (~> 2.2) rake (10.5.0) rb-fsevent (0.10.3) rb-inotify (0.9.10) diff --git a/ldclient-rb.gemspec b/ldclient-rb.gemspec index 46dac190..4e96b6b4 100644 --- a/ldclient-rb.gemspec +++ b/ldclient-rb.gemspec @@ -35,7 +35,7 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "faraday", [">= 0.9", "< 2"] spec.add_runtime_dependency "faraday-http-cache", [">= 1.3.0", "< 3"] spec.add_runtime_dependency "semantic", "~> 1.6" - spec.add_runtime_dependency "net-http-persistent", "~> 2.9" + spec.add_runtime_dependency "net-http-persistent", [">= 2.9", "< 4.0"] spec.add_runtime_dependency "concurrent-ruby", "~> 1.0" spec.add_runtime_dependency "http_tools", '~> 0.4.5' spec.add_runtime_dependency "socketry", "~> 0.5.1" From cb2193e5c25a1c1c52fd426413c323914f873f15 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sun, 6 Jan 2019 14:19:35 -0800 Subject: [PATCH 082/182] uninstall unwanted bundler upgrade in CI --- .circleci/config.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 85f6f7cf..6fb11b32 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -80,6 +80,8 @@ jobs: if [[ $i == jruby* ]]; then gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi + # bundler 2.0 may be preinstalled, we need to remove it if so + yes | gem uninstall bundler --version '>=2.0' || true; gem install bundler -v "~> 1.7"; bundle install; mv Gemfile.lock "Gemfile.lock.$i" From 3f4e432c3f892e980d300e1ea4fbedcc32ebcc80 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sun, 6 Jan 2019 18:19:39 -0800 Subject: [PATCH 083/182] rewrite requestor without Faraday - don't have proxy yet --- lib/ldclient-rb/polling.rb | 3 +- lib/ldclient-rb/requestor.rb | 53 +++++---- spec/http_util.rb | 103 +++++++++++++++++ spec/requestor_spec.rb | 208 +++++++++++++++++++++++++++-------- 4 files changed, 302 insertions(+), 65 deletions(-) create mode 100644 spec/http_util.rb diff --git a/lib/ldclient-rb/polling.rb b/lib/ldclient-rb/polling.rb index 4c6769f3..17ff7c12 100644 --- a/lib/ldclient-rb/polling.rb +++ b/lib/ldclient-rb/polling.rb @@ -63,8 +63,7 @@ def create_worker stop end rescue StandardError => exn - @config.logger.error { "[LDClient] Exception while polling: #{exn.inspect}" } - # TODO: log_exception(__method__.to_s, exn) + Util.log_exception(@config.logger, "Exception while polling", exn) end delta = @config.poll_interval - (Time.now - started_at) if delta > 0 diff --git a/lib/ldclient-rb/requestor.rb b/lib/ldclient-rb/requestor.rb index 3e244fbe..739ea277 100644 --- a/lib/ldclient-rb/requestor.rb +++ b/lib/ldclient-rb/requestor.rb @@ -1,6 +1,6 @@ +require "concurrent/atomics" require "json" require "net/http/persistent" -require "faraday/http_cache" module LaunchDarkly # @private @@ -16,14 +16,15 @@ def status # @private class Requestor + CacheEntry = Struct.new(:etag, :body) + def initialize(sdk_key, config) @sdk_key = sdk_key @config = config - @client = Faraday.new do |builder| - builder.use :http_cache, store: @config.cache_store - - builder.adapter :net_http_persistent - end + @client = 
Net::HTTP::Persistent.new + @client.open_timeout = @config.connect_timeout + @client.read_timeout = @config.read_timeout + @cache = @config.cache_store end def request_flag(key) @@ -39,24 +40,38 @@ def request_all_data() end def make_request(path) - uri = @config.base_uri + path - res = @client.get (uri) do |req| - req.headers["Authorization"] = @sdk_key - req.headers["User-Agent"] = "RubyClient/" + LaunchDarkly::VERSION - req.options.timeout = @config.read_timeout - req.options.open_timeout = @config.connect_timeout - if @config.proxy - req.options.proxy = Faraday::ProxyOptions.from @config.proxy - end + uri = URI(@config.base_uri + path) + req = Net::HTTP::Get.new(uri) + req["Authorization"] = @sdk_key + req["User-Agent"] = "RubyClient/" + LaunchDarkly::VERSION + cached = @cache.read(uri) + if !cached.nil? + req["If-None-Match"] = cached.etag end + # if @config.proxy + # req.options.proxy = Faraday::ProxyOptions.from @config.proxy + # end - @config.logger.debug { "[LDClient] Got response from uri: #{uri}\n\tstatus code: #{res.status}\n\theaders: #{res.headers}\n\tbody: #{res.body}" } + res = @client.request(uri, req) + status = res.code.to_i + @config.logger.debug { "[LDClient] Got response from uri: #{uri}\n\tstatus code: #{status}\n\theaders: #{res.to_hash}\n\tbody: #{res.body}" } - if res.status < 200 || res.status >= 300 - raise UnexpectedResponseError.new(res.status) + if status == 304 && !cached.nil? + body = cached.body + else + @cache.delete(uri) + if status < 200 || status >= 300 + raise UnexpectedResponseError.new(status) + end + body = res.body + etag = res["etag"] + @cache.write(uri, CacheEntry.new(etag, body)) if !etag.nil? end + JSON.parse(body, symbolize_names: true) + end - JSON.parse(res.body, symbolize_names: true) + def stop + @client.shutdown end private :make_request diff --git a/spec/http_util.rb b/spec/http_util.rb new file mode 100644 index 00000000..764f8e48 --- /dev/null +++ b/spec/http_util.rb @@ -0,0 +1,103 @@ +require "webrick" +require "webrick/httpproxy" +require "webrick/https" + +class StubHTTPServer + attr_reader :requests + + @@next_port = 50000 + + def initialize + @port = StubHTTPServer.next_port + begin + base_opts = { + BindAddress: '127.0.0.1', + Port: @port, + AccessLog: [], + Logger: NullLogger.new, + RequestCallback: method(:record_request) + } + @server = create_server(@port, base_opts) + rescue Errno::EADDRINUSE + @port = StubHTTPServer.next_port + retry + end + @requests = [] + end + + def self.next_port + p = @@next_port + @@next_port = (p + 1 < 60000) ? p + 1 : 50000 + p + end + + def create_server(port, base_opts) + WEBrick::HTTPServer.new(base_opts) + end + + def start + Thread.new { @server.start } + end + + def stop + @server.shutdown + end + + def base_uri + URI("http://127.0.0.1:#{@port}") + end + + def setup_response(uri_path, &action) + @server.mount_proc(uri_path, action) + end + + def setup_ok_response(uri_path, body, content_type=nil, headers={}) + setup_response(uri_path) do |req, res| + res.status = 200 + res.content_type = content_type if !content_type.nil? + res.body = body + headers.each { |n, v| res[n] = v } + end + end + + def record_request(req, res) + @requests.push(req) + end +end + +class StubProxyServer < StubHTTPServer + attr_reader :request_count + attr_accessor :connect_status + + def initialize + super + @request_count = 0 + end + + def create_server(port, base_opts) + WEBrick::HTTPProxyServer.new(base_opts.merge({ + ProxyContentHandler: proc do |req,res| + if !@connect_status.nil? 
+ res.status = @connect_status + end + @request_count += 1 + end + })) + end +end + +class NullLogger + def method_missing(*) + self + end +end + +def with_server(server = nil) + server = StubHTTPServer.new if server.nil? + begin + server.start + yield server + ensure + server.stop + end +end diff --git a/spec/requestor_spec.rb b/spec/requestor_spec.rb index b7838200..3d4a666f 100644 --- a/spec/requestor_spec.rb +++ b/spec/requestor_spec.rb @@ -1,52 +1,172 @@ require "spec_helper" -require "faraday" +require "http_util" + +$sdk_key = "secret" describe LaunchDarkly::Requestor do - describe ".request_all_flags" do - describe "with a proxy" do - let(:requestor) { - LaunchDarkly::Requestor.new( - "key", - LaunchDarkly::Config.new({ - :proxy => "http://proxy.com", - :base_uri => "http://ld.com" + def with_requestor(base_uri) + r = LaunchDarkly::Requestor.new($sdk_key, LaunchDarkly::Config.new(base_uri: base_uri)) + yield r + r.stop + end + + describe "request_all_flags" do + it "uses expected URI and headers" do + with_server do |server| + with_requestor(server.base_uri.to_s) do |requestor| + server.setup_ok_response("/", "{}") + requestor.request_all_data() + expect(server.requests.count).to eq 1 + expect(server.requests[0].unparsed_uri).to eq "/sdk/latest-all" + expect(server.requests[0].header).to include({ + "authorization" => [ $sdk_key ], + "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ] + }) + end + end + end + + it "parses response" do + expected_data = { flags: { x: { key: "x" } } } + with_server do |server| + with_requestor(server.base_uri.to_s) do |requestor| + server.setup_ok_response("/", expected_data.to_json) + data = requestor.request_all_data() + expect(data).to eq expected_data + end + end + end + + it "sends etag from previous response" do + etag = "xyz" + with_server do |server| + with_requestor(server.base_uri.to_s) do |requestor| + server.setup_response("/") do |req, res| + res.status = 200 + res.body = "{}" + res["ETag"] = etag + end + requestor.request_all_data() + expect(server.requests.count).to eq 1 + + requestor.request_all_data() + expect(server.requests.count).to eq 2 + expect(server.requests[1].header).to include({ "if-none-match" => [ etag ] }) + end + end + end + + it "can reuse cached data" do + etag = "xyz" + expected_data = { flags: { x: { key: "x" } } } + with_server do |server| + with_requestor(server.base_uri.to_s) do |requestor| + server.setup_response("/") do |req, res| + res.status = 200 + res.body = expected_data.to_json + res["ETag"] = etag + end + requestor.request_all_data() + expect(server.requests.count).to eq 1 + + server.setup_response("/") do |req, res| + res.status = 304 + end + data = requestor.request_all_data() + expect(server.requests.count).to eq 2 + expect(server.requests[1].header).to include({ "if-none-match" => [ etag ] }) + expect(data).to eq expected_data + end + end + end + + it "replaces cached data with new data" do + etag1 = "abc" + etag2 = "xyz" + expected_data1 = { flags: { x: { key: "x" } } } + expected_data2 = { flags: { y: { key: "y" } } } + with_server do |server| + with_requestor(server.base_uri.to_s) do |requestor| + server.setup_response("/") do |req, res| + res.status = 200 + res.body = expected_data1.to_json + res["ETag"] = etag1 + end + data = requestor.request_all_data() + expect(data).to eq expected_data1 + expect(server.requests.count).to eq 1 + + server.setup_response("/") do |req, res| + res.status = 304 + end + data = requestor.request_all_data() + expect(data).to eq expected_data1 + 
expect(server.requests.count).to eq 2 + expect(server.requests[1].header).to include({ "if-none-match" => [ etag1 ] }) + + server.setup_response("/") do |req, res| + res.status = 200 + res.body = expected_data2.to_json + res["ETag"] = etag2 + end + data = requestor.request_all_data() + expect(data).to eq expected_data2 + expect(server.requests.count).to eq 3 + expect(server.requests[2].header).to include({ "if-none-match" => [ etag1 ] }) + + server.setup_response("/") do |req, res| + res.status = 304 + end + data = requestor.request_all_data() + expect(data).to eq expected_data2 + expect(server.requests.count).to eq 4 + expect(server.requests[3].header).to include({ "if-none-match" => [ etag2 ] }) + end + end + end + + it "throws exception for error status" do + with_server do |server| + with_requestor(server.base_uri.to_s) do |requestor| + server.setup_response("/") do |req, res| + res.status = 400 + end + expect { requestor.request_all_data() }.to raise_error(LaunchDarkly::UnexpectedResponseError) + end + end + end + end + + describe "request_flag" do + it "uses expected URI and headers" do + with_server do |server| + with_requestor(server.base_uri.to_s) do |requestor| + server.setup_ok_response("/", "{}") + requestor.request_flag("key") + expect(server.requests.count).to eq 1 + expect(server.requests[0].unparsed_uri).to eq "/sdk/latest-flags/key" + expect(server.requests[0].header).to include({ + "authorization" => [ $sdk_key ], + "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ] }) - ) - } - it "converts the proxy option" do - faraday = Faraday.new - requestor.instance_variable_set(:@client, faraday) - allow(faraday).to receive(:get) do |*args, &block| - req = double(Faraday::Request, :headers => {}, :options => Faraday::RequestOptions.new) - block.call(req) - expect(args).to eq ['http://ld.com/sdk/latest-all'] - expect(req.options.proxy[:uri]).to eq URI("http://proxy.com") - double(body: '{"foo": "bar"}', status: 200, headers: {}) - end - - requestor.request_all_data() - end - end - describe "without a proxy" do - let(:requestor) { - LaunchDarkly::Requestor.new( - "key", - LaunchDarkly::Config.new({ - :base_uri => "http://ld.com" + end + end + end + end + + describe "request_segment" do + it "uses expected URI and headers" do + with_server do |server| + with_requestor(server.base_uri.to_s) do |requestor| + server.setup_ok_response("/", "{}") + requestor.request_segment("key") + expect(server.requests.count).to eq 1 + expect(server.requests[0].unparsed_uri).to eq "/sdk/latest-segments/key" + expect(server.requests[0].header).to include({ + "authorization" => [ $sdk_key ], + "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ] }) - ) - } - it "converts the proxy option" do - faraday = Faraday.new - requestor.instance_variable_set(:@client, faraday) - allow(faraday).to receive(:get) do |*args, &block| - req = double(Faraday::Request, :headers => {}, :options => Faraday::RequestOptions.new) - block.call(req) - expect(args).to eq ['http://ld.com/sdk/latest-all'] - expect(req.options.proxy).to eq nil - double(body: '{"foo": "bar"}', status: 200, headers: {}) - end - requestor.request_all_data() + end end end end From b250437df78c99c6c0774e72a5f75ca06c5adf4e Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sun, 6 Jan 2019 18:21:32 -0800 Subject: [PATCH 084/182] reduce intermittent HTTP errors on stub server by not reusing ports --- spec/http_util.rb | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/spec/http_util.rb b/spec/http_util.rb index 
434cafc8..764f8e48 100644 --- a/spec/http_util.rb +++ b/spec/http_util.rb @@ -1,4 +1,3 @@ -require "spec_helper" require "webrick" require "webrick/httpproxy" require "webrick/https" @@ -6,8 +5,10 @@ class StubHTTPServer attr_reader :requests + @@next_port = 50000 + def initialize - @port = 50000 + @port = StubHTTPServer.next_port begin base_opts = { BindAddress: '127.0.0.1', @@ -18,12 +19,18 @@ def initialize } @server = create_server(@port, base_opts) rescue Errno::EADDRINUSE - @port += 1 + @port = StubHTTPServer.next_port retry end @requests = [] end + def self.next_port + p = @@next_port + @@next_port = (p + 1 < 60000) ? p + 1 : 50000 + p + end + def create_server(port, base_opts) WEBrick::HTTPServer.new(base_opts) end From 5b4b8555073bcc8c684e0317c830aee9c8fd543f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sun, 6 Jan 2019 18:43:46 -0800 Subject: [PATCH 085/182] fix charset handling --- lib/ldclient-rb/requestor.rb | 30 ++++++++++++++++++++++++++---- spec/requestor_spec.rb | 23 +++++++++++++++++++++++ 2 files changed, 49 insertions(+), 4 deletions(-) diff --git a/lib/ldclient-rb/requestor.rb b/lib/ldclient-rb/requestor.rb index 739ea277..94683bcb 100644 --- a/lib/ldclient-rb/requestor.rb +++ b/lib/ldclient-rb/requestor.rb @@ -39,6 +39,12 @@ def request_all_data() make_request("/sdk/latest-all") end + def stop + @client.shutdown + end + + private + def make_request(path) uri = URI(@config.base_uri + path) req = Net::HTTP::Get.new(uri) @@ -63,17 +69,33 @@ def make_request(path) if status < 200 || status >= 300 raise UnexpectedResponseError.new(status) end - body = res.body + body = fix_encoding(res.body, res["content-type"]) etag = res["etag"] @cache.write(uri, CacheEntry.new(etag, body)) if !etag.nil? end JSON.parse(body, symbolize_names: true) end - def stop - @client.shutdown + def fix_encoding(body, content_type) + return body if content_type.nil? + media_type, charset = parse_content_type(content_type) + return body if charset.nil? + body.force_encoding(Encoding::find(charset)).encode(Encoding::UTF_8) end - private :make_request + def parse_content_type(value) + return [nil, nil] if value.nil? 
|| value == '' + parts = value.split(/; */) + return [value, nil] if parts.count < 2 + charset = nil + parts.each do |part| + fields = part.split('=') + if fields.count >= 2 && fields[0] == 'charset' + charset = fields[1] + break + end + end + return [parts[0], charset] + end end end diff --git a/spec/requestor_spec.rb b/spec/requestor_spec.rb index 3d4a666f..3cc20991 100644 --- a/spec/requestor_spec.rb +++ b/spec/requestor_spec.rb @@ -125,6 +125,29 @@ def with_requestor(base_uri) end end + it "uses UTF-8 encoding by default" do + content = '{"flags": {"flagkey": {"key": "flagkey", "variations": ["blue", "grėeń"]}}}' + with_server do |server| + server.setup_ok_response("/sdk/latest-all", content, "application/json") + with_requestor(server.base_uri.to_s) do |requestor| + data = requestor.request_all_data + expect(data).to eq(JSON.parse(content, symbolize_names: true)) + end + end + end + + it "detects other encodings from Content-Type" do + content = '{"flags": {"flagkey": {"key": "flagkey", "variations": ["proszę", "dziękuję"]}}}' + with_server do |server| + server.setup_ok_response("/sdk/latest-all", content.encode(Encoding::ISO_8859_2), + "text/plain; charset=ISO-8859-2") + with_requestor(server.base_uri.to_s) do |requestor| + data = requestor.request_all_data + expect(data).to eq(JSON.parse(content, symbolize_names: true)) + end + end + end + it "throws exception for error status" do with_server do |server| with_requestor(server.base_uri.to_s) do |requestor| From 7a7c273764921f9f33ed3fd3953b1d4a2aacb8cc Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sun, 6 Jan 2019 18:59:32 -0800 Subject: [PATCH 086/182] add test for proxy support, remove obsolete property --- lib/ldclient-rb/config.rb | 15 --------------- lib/ldclient-rb/requestor.rb | 5 +---- spec/requestor_spec.rb | 18 ++++++++++++++++++ 3 files changed, 19 insertions(+), 19 deletions(-) diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index 64ad7378..c14f59c8 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -53,7 +53,6 @@ def initialize(opts = {}) @use_ldd = opts.has_key?(:use_ldd) ? opts[:use_ldd] : Config.default_use_ldd @offline = opts.has_key?(:offline) ? opts[:offline] : Config.default_offline @poll_interval = opts.has_key?(:poll_interval) && opts[:poll_interval] > Config.default_poll_interval ? opts[:poll_interval] : Config.default_poll_interval - @proxy = opts[:proxy] || Config.default_proxy @all_attributes_private = opts[:all_attributes_private] || false @private_attribute_names = opts[:private_attribute_names] || [] @send_events = opts.has_key?(:send_events) ? opts[:send_events] : Config.default_send_events @@ -184,12 +183,6 @@ def offline? # attr_reader :feature_store - # - # The proxy configuration string. - # @return [String] - # - attr_reader :proxy - # # True if all user attributes (other than the key) should be considered private. This means # that the attribute values will not be sent to LaunchDarkly in analytics events and will not @@ -336,14 +329,6 @@ def self.default_connect_timeout 2 end - # - # The default value for {#proxy}. - # @return [String] nil - # - def self.default_proxy - nil - end - # # The default value for {#logger}. 
# @return [::Logger] the Rails logger if in Rails, or a default [::Logger] at WARN level otherwise diff --git a/lib/ldclient-rb/requestor.rb b/lib/ldclient-rb/requestor.rb index 94683bcb..5f48d7ff 100644 --- a/lib/ldclient-rb/requestor.rb +++ b/lib/ldclient-rb/requestor.rb @@ -7,6 +7,7 @@ module LaunchDarkly class UnexpectedResponseError < StandardError def initialize(status) @status = status + super("HTTP error #{status}") end def status @@ -54,10 +55,6 @@ def make_request(path) if !cached.nil? req["If-None-Match"] = cached.etag end - # if @config.proxy - # req.options.proxy = Faraday::ProxyOptions.from @config.proxy - # end - res = @client.request(uri, req) status = res.code.to_i @config.logger.debug { "[LDClient] Got response from uri: #{uri}\n\tstatus code: #{status}\n\theaders: #{res.to_hash}\n\tbody: #{res.body}" } diff --git a/spec/requestor_spec.rb b/spec/requestor_spec.rb index 3cc20991..eb6c2b62 100644 --- a/spec/requestor_spec.rb +++ b/spec/requestor_spec.rb @@ -158,6 +158,24 @@ def with_requestor(base_uri) end end end + + it "can use a proxy server" do + content = '{"flags": {"flagkey": {"key": "flagkey"}}}' + with_server do |server| + server.setup_ok_response("/sdk/latest-all", content, "application/json", { "etag" => "x" }) + with_server(StubProxyServer.new) do |proxy| + begin + ENV["http_proxy"] = proxy.base_uri.to_s + with_requestor(server.base_uri.to_s) do |requestor| + data = requestor.request_all_data + expect(data).to eq(JSON.parse(content, symbolize_names: true)) + end + ensure + ENV["http_proxy"] = nil + end + end + end + end end describe "request_flag" do From 1502e61d05406ee51e2007e9a429bdbd67126f64 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sun, 6 Jan 2019 19:16:49 -0800 Subject: [PATCH 087/182] rm duplicate test code --- spec/sse_client/sse_client_spec.rb | 2 +- spec/sse_client/sse_shared.rb | 82 -------------------------- spec/sse_client/streaming_http_spec.rb | 2 +- 3 files changed, 2 insertions(+), 84 deletions(-) delete mode 100644 spec/sse_client/sse_shared.rb diff --git a/spec/sse_client/sse_client_spec.rb b/spec/sse_client/sse_client_spec.rb index 54f1f5c7..71e96112 100644 --- a/spec/sse_client/sse_client_spec.rb +++ b/spec/sse_client/sse_client_spec.rb @@ -1,6 +1,6 @@ require "spec_helper" require "socketry" -require "sse_client/sse_shared" +require "http_util" # # End-to-end tests of SSEClient against a real server diff --git a/spec/sse_client/sse_shared.rb b/spec/sse_client/sse_shared.rb deleted file mode 100644 index 3ecabb57..00000000 --- a/spec/sse_client/sse_shared.rb +++ /dev/null @@ -1,82 +0,0 @@ -require "spec_helper" -require "webrick" -require "webrick/httpproxy" -require "webrick/https" - -class StubHTTPServer - def initialize - @port = 50000 - begin - @server = create_server(@port) - rescue Errno::EADDRINUSE - @port += 1 - retry - end - end - - def create_server(port) - WEBrick::HTTPServer.new( - BindAddress: '127.0.0.1', - Port: port, - AccessLog: [], - Logger: NullLogger.new - ) - end - - def start - Thread.new { @server.start } - end - - def stop - @server.shutdown - end - - def base_uri - URI("http://127.0.0.1:#{@port}") - end - - def setup_response(uri_path, &action) - @server.mount_proc(uri_path, action) - end -end - -class StubProxyServer < StubHTTPServer - attr_reader :request_count - attr_accessor :connect_status - - def initialize - super - @request_count = 0 - end - - def create_server(port) - WEBrick::HTTPProxyServer.new( - BindAddress: '127.0.0.1', - Port: port, - AccessLog: [], - Logger: NullLogger.new, - 
ProxyContentHandler: proc do |req,res| - if !@connect_status.nil? - res.status = @connect_status - end - @request_count += 1 - end - ) - end -end - -class NullLogger - def method_missing(*) - self - end -end - -def with_server(server = nil) - server = StubHTTPServer.new if server.nil? - begin - server.start - yield server - ensure - server.stop - end -end diff --git a/spec/sse_client/streaming_http_spec.rb b/spec/sse_client/streaming_http_spec.rb index 7dfac9bd..136a727a 100644 --- a/spec/sse_client/streaming_http_spec.rb +++ b/spec/sse_client/streaming_http_spec.rb @@ -1,6 +1,6 @@ require "spec_helper" require "socketry" -require "sse_client/sse_shared" +require "http_util" # # End-to-end tests of HTTP requests against a real server From 485784240ffe4c747b1e60eb93ee70dbaa8b0055 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sun, 6 Jan 2019 19:58:28 -0800 Subject: [PATCH 088/182] change event sending to use Net::HTTP; completely remove Faraday --- ldclient-rb.gemspec | 2 - lib/ldclient-rb/cache_store.rb | 6 +-- lib/ldclient-rb/config.rb | 7 ++-- lib/ldclient-rb/events.rb | 44 +++++++++++--------- spec/events_spec.rb | 73 +++++++++++++++++++++++++--------- spec/http_util.rb | 6 +++ 6 files changed, 92 insertions(+), 46 deletions(-) diff --git a/ldclient-rb.gemspec b/ldclient-rb.gemspec index 9d541c18..15c20739 100644 --- a/ldclient-rb.gemspec +++ b/ldclient-rb.gemspec @@ -33,8 +33,6 @@ Gem::Specification.new do |spec| spec.add_development_dependency "listen", "~> 3.0" # see file_data_source.rb spec.add_runtime_dependency "json", [">= 1.8", "< 3"] - spec.add_runtime_dependency "faraday", [">= 0.9", "< 2"] - spec.add_runtime_dependency "faraday-http-cache", [">= 1.3.0", "< 3"] spec.add_runtime_dependency "semantic", "~> 1.6" spec.add_runtime_dependency "net-http-persistent", "~> 2.9" spec.add_runtime_dependency "concurrent-ruby", "~> 1.0" diff --git a/lib/ldclient-rb/cache_store.rb b/lib/ldclient-rb/cache_store.rb index 8451bb5f..b91b363d 100644 --- a/lib/ldclient-rb/cache_store.rb +++ b/lib/ldclient-rb/cache_store.rb @@ -2,11 +2,9 @@ module LaunchDarkly # - # A thread-safe in-memory store suitable for use with the Faraday caching HTTP client. Uses the - # concurrent-ruby gem's Map as the underlying cache. + # A thread-safe in-memory store that uses the same semantics that Faraday would expect, although we + # no longer use Faraday. This is used by Requestor, when we are not in a Rails environment. # - # @see https://github.com/plataformatec/faraday-http-cache - # @see https://github.com/ruby-concurrency # @private # class ThreadSafeMemoryStore diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index c14f59c8..e5217f45 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -152,9 +152,10 @@ def offline? attr_reader :capacity # - # A store for HTTP caching. This must support the semantics used by the - # [`faraday-http-cache`](https://github.com/plataformatec/faraday-http-cache) gem. Defaults - # to the Rails cache in a Rails environment, or a thread-safe in-memory store otherwise. + # A store for HTTP caching (used only in polling mode). This must support the semantics used by + # the [`faraday-http-cache`](https://github.com/plataformatec/faraday-http-cache) gem, although + # the SDK no longer uses Faraday. Defaults to the Rails cache in a Rails environment, or a + # thread-safe in-memory store otherwise. 
# @return [Object] # attr_reader :cache_store diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index cbae5ac5..02885904 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -1,9 +1,9 @@ require "concurrent" require "concurrent/atomics" require "concurrent/executors" +require "net/http/persistent" require "thread" require "time" -require "faraday" module LaunchDarkly MAX_FLUSH_WORKERS = 5 @@ -115,7 +115,12 @@ class EventDispatcher def initialize(queue, sdk_key, config, client) @sdk_key = sdk_key @config = config - @client = client ? client : Faraday.new + + @client = client ? client : Net::HTTP::Persistent.new do |c| + c.open_timeout = @config.connect_timeout + c.read_timeout = @config.read_timeout + end + @user_keys = SimpleLRUCacheSet.new(config.user_keys_capacity) @formatter = EventOutputFormatter.new(config) @disabled = Concurrent::AtomicBoolean.new(false) @@ -162,7 +167,7 @@ def main_loop(queue, buffer, flush_workers) def do_shutdown(flush_workers) flush_workers.shutdown flush_workers.wait_for_termination - # There seems to be no such thing as "close" in Faraday: https://github.com/lostisland/faraday/issues/241 + @client.shutdown end def synchronize_for_testing(flush_workers) @@ -246,16 +251,17 @@ def trigger_flush(buffer, flush_workers) end def handle_response(res) - if res.status >= 400 - message = Util.http_error_message(res.status, "event delivery", "some events were dropped") + status = res.code.to_i + if status >= 400 + message = Util.http_error_message(status, "event delivery", "some events were dropped") @config.logger.error { "[LDClient] #{message}" } - if !Util.http_error_recoverable?(res.status) + if !Util.http_error_recoverable?(status) @disabled.value = true end else - if !res.headers.nil? && res.headers.has_key?("Date") + if !res["date"].nil? begin - res_time = (Time.httpdate(res.headers["Date"]).to_f * 1000).to_i + res_time = (Time.httpdate(res["date"]).to_f * 1000).to_i @last_known_past_time.value = res_time rescue ArgumentError end @@ -317,21 +323,21 @@ def run(sdk_key, config, client, payload, formatter) end begin config.logger.debug { "[LDClient] sending #{events_out.length} events: #{body}" } - res = client.post (config.events_uri + "/bulk") do |req| - req.headers["Authorization"] = sdk_key - req.headers["User-Agent"] = "RubyClient/" + LaunchDarkly::VERSION - req.headers["Content-Type"] = "application/json" - req.headers["X-LaunchDarkly-Event-Schema"] = CURRENT_SCHEMA_VERSION.to_s - req.body = body - req.options.timeout = config.read_timeout - req.options.open_timeout = config.connect_timeout - end + uri = URI(config.events_uri + "/bulk") + req = Net::HTTP::Post.new(uri) + req.content_type = "application/json" + req.body = body + req["Authorization"] = sdk_key + req["User-Agent"] = "RubyClient/" + LaunchDarkly::VERSION + req["X-LaunchDarkly-Event-Schema"] = CURRENT_SCHEMA_VERSION.to_s + res = client.request(uri, req) rescue StandardError => exn config.logger.warn { "[LDClient] Error flushing events: #{exn.inspect}." 
} next end - if res.status < 200 || res.status >= 300 - if Util.http_error_recoverable?(res.status) + status = res.code.to_i + if status < 200 || status >= 300 + if Util.http_error_recoverable?(status) next end end diff --git a/spec/events_spec.rb b/spec/events_spec.rb index 56bd14a2..86cc67b6 100644 --- a/spec/events_spec.rb +++ b/spec/events_spec.rb @@ -1,5 +1,5 @@ +require "http_util" require "spec_helper" -require "faraday" require "time" describe LaunchDarkly::EventProcessor do @@ -348,7 +348,7 @@ @ep.flush @ep.wait_until_inactive - expect(hc.get_request.headers["Authorization"]).to eq "sdk_key" + expect(hc.get_request["authorization"]).to eq "sdk_key" end def verify_unrecoverable_http_error(status) @@ -414,7 +414,7 @@ def verify_recoverable_http_error(status) e = { kind: "identify", user: user } @ep.add_event(e) - hc.set_exception(Faraday::Error::ConnectionFailed.new("fail")) + hc.set_exception(IOError.new("deliberate error")) @ep.flush @ep.wait_until_inactive @@ -423,6 +423,46 @@ def verify_recoverable_http_error(status) expect(hc.get_request).to be_nil # no 3rd request end + it "makes actual HTTP request with correct headers" do + e = { kind: "identify", key: user[:key], user: user } + with_server do |server| + server.setup_ok_response("/bulk", "") + + @ep = subject.new("sdk_key", LaunchDarkly::Config.new(events_uri: server.base_uri.to_s)) + @ep.add_event(e) + @ep.flush + + req = server.await_request + expect(req.header).to include({ + "authorization" => [ "sdk_key" ], + "content-type" => [ "application/json" ], + "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ], + "x-launchdarkly-event-schema" => [ "3" ] + }) + end + end + + it "can use a proxy server" do + e = { kind: "identify", key: user[:key], user: user } + with_server do |server| + server.setup_ok_response("/bulk", "") + + with_server(StubProxyServer.new) do |proxy| + begin + ENV["http_proxy"] = proxy.base_uri.to_s + @ep = subject.new("sdk_key", LaunchDarkly::Config.new(events_uri: server.base_uri.to_s)) + @ep.add_event(e) + @ep.flush + + req = server.await_request + expect(req["content-type"]).to eq("application/json") + ensure + ENV["http_proxy"] = nil + end + end + end + end + def index_event(e, user) { kind: "index", @@ -496,38 +536,35 @@ def reset @status = 200 end - def post(uri) - req = Faraday::Request.create("POST") - req.headers = {} - req.options = Faraday::RequestOptions.new - yield req + def request(uri, req) @requests.push(req) if @exception raise @exception else - resp = Faraday::Response.new headers = {} if @server_time headers["Date"] = @server_time.httpdate end - resp.finish({ - status: @status ? @status : 200, - response_headers: headers - }) - resp + FakeResponse.new(@status ? 
@status : 200, headers) end end def get_request @requests.shift end + + def shutdown + end end class FakeResponse - def initialize(status) - @status = status - end + include Net::HTTPHeader - attr_reader :status + attr_reader :code + + def initialize(status, headers) + @code = status.to_s + initialize_http_header(headers) + end end end diff --git a/spec/http_util.rb b/spec/http_util.rb index 764f8e48..e43e2ded 100644 --- a/spec/http_util.rb +++ b/spec/http_util.rb @@ -23,6 +23,7 @@ def initialize retry end @requests = [] + @requests_queue = Queue.new end def self.next_port @@ -62,6 +63,11 @@ def setup_ok_response(uri_path, body, content_type=nil, headers={}) def record_request(req, res) @requests.push(req) + @requests_queue << req + end + + def await_request + @requests_queue.pop end end From d658715b420ee029d85b442f643785a759aa4d5c Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 8 Jan 2019 20:42:57 -0800 Subject: [PATCH 089/182] implement dependency ordering for feature store data --- lib/ldclient-rb/impl/store_client_wrapper.rb | 47 ++++++++++++++ lib/ldclient-rb/impl/store_data_set_sorter.rb | 57 +++++++++++++++++ lib/ldclient-rb/in_memory_store.rb | 13 +++- .../integrations/util/store_wrapper.rb | 5 ++ lib/ldclient-rb/interfaces.rb | 5 ++ lib/ldclient-rb/ldclient.rb | 14 +++- spec/ldclient_spec.rb | 64 +++++++++++++++++++ 7 files changed, 200 insertions(+), 5 deletions(-) create mode 100644 lib/ldclient-rb/impl/store_client_wrapper.rb create mode 100644 lib/ldclient-rb/impl/store_data_set_sorter.rb diff --git a/lib/ldclient-rb/impl/store_client_wrapper.rb b/lib/ldclient-rb/impl/store_client_wrapper.rb new file mode 100644 index 00000000..f0948251 --- /dev/null +++ b/lib/ldclient-rb/impl/store_client_wrapper.rb @@ -0,0 +1,47 @@ +require "ldclient-rb/interfaces" +require "ldclient-rb/impl/store_data_set_sorter" + +module LaunchDarkly + module Impl + # + # Provides additional behavior that the client requires before or after feature store operations. + # Currently this just means sorting the data set for init(). In the future we may also use this + # to provide an update listener capability. + # + class FeatureStoreClientWrapper + include Interfaces::FeatureStore + + def initialize(store) + @store = store + end + + def init(all_data) + @store.init(FeatureStoreDataSetSorter.sort_all_collections(all_data)) + end + + def get(kind, key) + @store.get(kind, key) + end + + def all(kind) + @store.all(kind) + end + + def upsert(kind, item) + @store.upsert(kind, item) + end + + def delete(kind, key, version) + @store.delete(kind, key, version) + end + + def initialized? + @store.initialized? + end + + def stop + @store.stop + end + end + end +end diff --git a/lib/ldclient-rb/impl/store_data_set_sorter.rb b/lib/ldclient-rb/impl/store_data_set_sorter.rb new file mode 100644 index 00000000..4f3635cd --- /dev/null +++ b/lib/ldclient-rb/impl/store_data_set_sorter.rb @@ -0,0 +1,57 @@ + +module LaunchDarkly + module Impl + # + # Implements a dependency graph ordering for data to be stored in a feature store. We must use this + # on every data set that will be passed to the feature store's init() method. + # + class FeatureStoreDataSetSorter + # + # Returns a copy of the input hash that has the following guarantees: the iteration order of the outer + # hash will be in ascending order by the VersionDataKind's :priority property (if any), and for each + # data kind that has a :get_dependency_keys function, the inner hash will have an iteration order + # where B is before A if A has a dependency on B. 
+ # + # This implementation relies on the fact that hashes in Ruby have an iteration order that is the same + # as the insertion order. Also, due to the way we deserialize JSON received from LaunchDarkly, the + # keys in the inner hash will always be symbols. + # + def self.sort_all_collections(all_data) + outer_hash = {} + kinds = all_data.keys.sort_by { |k| + k[:priority].nil? ? k[:namespace].length : k[:priority] # arbitrary order if priority is unknown + } + kinds.each do |kind| + items = all_data[kind] + outer_hash[kind] = self.sort_collection(kind, items) + end + outer_hash + end + + def self.sort_collection(kind, input) + dependency_fn = kind[:get_dependency_keys] + return input if dependency_fn.nil? || input.empty? + remaining_items = input.clone + items_out = {} + while !remaining_items.empty? + # pick a random item that hasn't been updated yet + remaining_items.each do |key, item| + self.add_with_dependencies_first(item, dependency_fn, remaining_items, items_out) + break + end + end + items_out + end + + def self.add_with_dependencies_first(item, dependency_fn, remaining_items, items_out) + item_key = item[:key].to_sym + remaining_items.delete(item_key) # we won't need to visit this item again + dependency_fn.call(item).each do |dep_key| + dep_item = remaining_items[dep_key.to_sym] + self.add_with_dependencies_first(dep_item, dependency_fn, remaining_items, items_out) if !dep_item.nil? + end + items_out[item_key] = item + end + end + end +end diff --git a/lib/ldclient-rb/in_memory_store.rb b/lib/ldclient-rb/in_memory_store.rb index 4814c85d..c959f399 100644 --- a/lib/ldclient-rb/in_memory_store.rb +++ b/lib/ldclient-rb/in_memory_store.rb @@ -6,12 +6,21 @@ module LaunchDarkly # we add another storable data type in the future, as long as it follows the same pattern # (having "key", "version", and "deleted" properties), we only need to add a corresponding # constant here and the existing store should be able to handle it. + # + # The :priority and :get_dependency_keys properties are used by FeatureStoreDataSetSorter + # to ensure data consistency during non-atomic updates. + + # @private FEATURES = { - namespace: "features" + namespace: "features", + priority: 1, # that is, features should be stored after segments + get_dependency_keys: lambda { |flag| (flag[:prerequisites] || []).map { |p| p[:key] } } }.freeze + # @private SEGMENTS = { - namespace: "segments" + namespace: "segments", + priority: 0 }.freeze # diff --git a/lib/ldclient-rb/integrations/util/store_wrapper.rb b/lib/ldclient-rb/integrations/util/store_wrapper.rb index 46a648c1..eef22d5e 100644 --- a/lib/ldclient-rb/integrations/util/store_wrapper.rb +++ b/lib/ldclient-rb/integrations/util/store_wrapper.rb @@ -151,6 +151,11 @@ module FeatureStoreCore # Initializes the store. This is the same as {LaunchDarkly::Interfaces::FeatureStore#init}, # but the wrapper will take care of updating the cache if caching is enabled. # + # If possible, the store should update the entire data set atomically. If that is not possible, + # it should iterate through the outer hash and then the inner hash using the existing iteration + # order of those hashes (the SDK will ensure that the items were inserted into the hashes in + # the correct order), storing each item, and then delete any leftover items at the very end. 
+ # # @param all_data [Hash] a hash where each key is one of the data kind objects, and each # value is in turn a hash of string keys to entities # @return [void] diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb index 912472b5..b6920fb5 100644 --- a/lib/ldclient-rb/interfaces.rb +++ b/lib/ldclient-rb/interfaces.rb @@ -33,6 +33,11 @@ module FeatureStore # date-- there is no need to perform individual version comparisons between the existing # objects and the supplied features. # + # If possible, the store should update the entire data set atomically. If that is not possible, + # it should iterate through the outer hash and then the inner hash using the existing iteration + # order of those hashes (the SDK will ensure that the items were inserted into the hashes in + # the correct order), storing each item, and then delete any leftover items at the very end. + # # @param all_data [Hash] a hash where each key is one of the data kind objects, and each # value is in turn a hash of string keys to entities # @return [void] diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 868c65bd..d9a09c65 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -1,3 +1,4 @@ +require "ldclient-rb/impl/store_client_wrapper" require "concurrent/atomics" require "digest/sha1" require "logger" @@ -23,8 +24,15 @@ class LDClient # @return [LDClient] The LaunchDarkly client instance def initialize(sdk_key, config = Config.default, wait_for_sec = 5) @sdk_key = sdk_key - @config = config - @store = config.feature_store + + # We need to wrap the feature store object with a FeatureStoreClientWrapper in order to add + # some necessary logic around updates. Unfortunately, we have code elsewhere that accesses + # the feature store through the Config object, so we need to make a new Config that uses + # the wrapped store. + @store = Impl::FeatureStoreClientWrapper.new(config.feature_store) + updated_config = config.clone + updated_config.instance_variable_set(:@feature_store, @store) + @config = updated_config if @config.offline? || !@config.send_events @event_processor = NullEventProcessor.new @@ -39,7 +47,7 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) data_source_or_factory = @config.data_source || self.method(:create_default_data_source) if data_source_or_factory.respond_to? :call - @data_source = data_source_or_factory.call(sdk_key, config) + @data_source = data_source_or_factory.call(sdk_key, @config) else @data_source = data_source_or_factory end diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index b3a9592c..453f4b53 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -375,4 +375,68 @@ def event_processor expect(ep).not_to be_a(LaunchDarkly::NullEventProcessor) end end + + describe "feature store data ordering" do + let(:dependency_ordering_test_data) { + { + LaunchDarkly::FEATURES => { + a: { key: "a", prerequisites: [ { key: "b" }, { key: "c" } ] }, + b: { key: "b", prerequisites: [ { key: "c" }, { key: "e" } ] }, + c: { key: "c" }, + d: { key: "d" }, + e: { key: "e" }, + f: { key: "f" } + }, + LaunchDarkly::SEGMENTS => { + o: { key: "o" } + } + } + } + + class FakeFeatureStore + attr_reader :received_data + + def init(all_data) + @received_data = all_data + end + end + + class FakeUpdateProcessor + def initialize(store, data) + @store = store + @data = data + end + + def start + @store.init(@data) + ev = Concurrent::Event.new + ev.set + ev + end + + def stop + end + + def initialized? 
+ true + end + end + + it "passes data set to feature store in correct order on init" do + store = FakeFeatureStore.new + data_source_factory = lambda { |sdk_key, config| FakeUpdateProcessor.new(config.feature_store, + dependency_ordering_test_data) } + config = LaunchDarkly::Config.new(send_events: false, feature_store: store, data_source: data_source_factory) + client = subject.new("secret", config) + + data = store.received_data + expect(data).not_to be_nil + expect(data.count).to eq(2) + + puts(data) + + # Segments should always come first + expect(data.keys[0]).to be(LaunchDarkly::SEGMENTS) + end + end end \ No newline at end of file From ed302ad79bd63c2197eadff8c2e9ce410eed54c1 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 8 Jan 2019 22:17:51 -0800 Subject: [PATCH 090/182] fix incomplete test --- spec/ldclient_spec.rb | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 453f4b53..fca81ab0 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -432,11 +432,26 @@ def initialized? data = store.received_data expect(data).not_to be_nil expect(data.count).to eq(2) - - puts(data) # Segments should always come first expect(data.keys[0]).to be(LaunchDarkly::SEGMENTS) + expect(data.values[0].count).to eq(dependency_ordering_test_data[LaunchDarkly::SEGMENTS].count) + + # Features should be ordered so that a flag always appears after its prerequisites, if any + expect(data.keys[1]).to be(LaunchDarkly::FEATURES) + flags_map = data.values[1] + flags_list = flags_map.values + expect(flags_list.count).to eq(dependency_ordering_test_data[LaunchDarkly::FEATURES].count) + flags_list.each_with_index do |item, item_index| + (item[:prerequisites] || []).each do |prereq| + prereq = flags_map[prereq[:key].to_sym] + prereq_index = flags_list.index(prereq) + if prereq_index > item_index + all_keys = (flags_list.map { |f| f[:key] }).join(", ") + raise "#{item[:key]} depends on #{prereq[:key]}, but #{item[:key]} was listed first; keys in order are [#{all_keys}]" + end + end + end end end end \ No newline at end of file From 8436be4f1e6ad7f1ff48208969fb4d6e8e73ff61 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 15 Jan 2019 12:00:51 -0800 Subject: [PATCH 091/182] use Hash.first --- lib/ldclient-rb/impl/store_data_set_sorter.rb | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/lib/ldclient-rb/impl/store_data_set_sorter.rb b/lib/ldclient-rb/impl/store_data_set_sorter.rb index 4f3635cd..4454fe75 100644 --- a/lib/ldclient-rb/impl/store_data_set_sorter.rb +++ b/lib/ldclient-rb/impl/store_data_set_sorter.rb @@ -35,10 +35,8 @@ def self.sort_collection(kind, input) items_out = {} while !remaining_items.empty? 
# pick a random item that hasn't been updated yet - remaining_items.each do |key, item| - self.add_with_dependencies_first(item, dependency_fn, remaining_items, items_out) - break - end + key, item = remaining_items.first + self.add_with_dependencies_first(item, dependency_fn, remaining_items, items_out) end items_out end From e84fbe7ab74894cc4ba600b05d043cc949eae21b Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 15 Jan 2019 12:58:47 -0800 Subject: [PATCH 092/182] add test for Unicode in feature store serialization --- spec/feature_store_spec_base.rb | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/spec/feature_store_spec_base.rb b/spec/feature_store_spec_base.rb index 0e0f1ca9..3580a67f 100644 --- a/spec/feature_store_spec_base.rb +++ b/spec/feature_store_spec_base.rb @@ -109,4 +109,14 @@ def new_version_plus(f, deltaVersion, attrs = {}) store.delete(LaunchDarkly::FEATURES, key0, feature0[:version] - 1) expect(store.get(LaunchDarkly::FEATURES, key0)).to eq feature0 end + + it "stores Unicode data correctly" do + flag = { + key: "tęst-feåtūre-flæg😺", + version: 1, + deleted: false + } + store.upsert(LaunchDarkly::FEATURES, flag) + expect(store.get(LaunchDarkly::FEATURES, flag[:key])).to eq flag + end end From 89209237faa8f29b2b063839a38c491a7bafda40 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 15 Jan 2019 13:38:43 -0800 Subject: [PATCH 093/182] fill in the rest of the Consul implementation --- Gemfile.lock | 6 +- .../impl/integrations/consul_impl.rb | 84 ++++++++++++------- 2 files changed, 57 insertions(+), 33 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index 72158223..2e96a86a 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -6,6 +6,7 @@ PATH faraday (>= 0.9, < 2) faraday-http-cache (>= 1.3.0, < 3) json (>= 1.8, < 3) + ld-eventsource (~> 1.0) net-http-persistent (>= 2.9, < 4.0) semantic (~> 1.6) @@ -28,6 +29,9 @@ GEM concurrent-ruby (1.1.4) connection_pool (2.2.1) diff-lcs (1.3) + diplomat (2.0.2) + faraday (~> 0.9) + json docile (1.1.5) faraday (0.15.4) multipart-post (>= 1.2, < 3) @@ -36,7 +40,6 @@ GEM ffi (1.9.25) ffi (1.9.25-java) hitimes (1.3.0) - hitimes (1.3.0-java) http_tools (0.4.5) jmespath (1.4.0) json (1.8.6) @@ -92,6 +95,7 @@ DEPENDENCIES bundler (~> 1.7) codeclimate-test-reporter (~> 0) connection_pool (>= 2.1.2) + diplomat (>= 2.0.2) ldclient-rb! listen (~> 3.0) rake (~> 10.0) diff --git a/lib/ldclient-rb/impl/integrations/consul_impl.rb b/lib/ldclient-rb/impl/integrations/consul_impl.rb index 48d308c2..5044f33c 100644 --- a/lib/ldclient-rb/impl/integrations/consul_impl.rb +++ b/lib/ldclient-rb/impl/integrations/consul_impl.rb @@ -22,15 +22,15 @@ def initialize(opts) @prefix = (opts[:prefix] || LaunchDarkly::Integrations::Consul.default_prefix) + '/' @logger = opts[:logger] || Config.default_logger - @client = Diplomat::Kv.new(configuration: opts[:consul_config]) - + Diplomat.configuration = opts[:consul_config] if !opts[:consul_config].nil? @logger.info("ConsulFeatureStore: using Consul host at #{Diplomat.configuration.url}") end def init_internal(all_data) # Start by reading the existing keys; we will later delete any of these that weren't in all_data. 
- unused_old_keys = set() - unused_old_keys.merge(@client.get(@prefix, keys: true, recurse: true)) + unused_old_keys = Set.new + keys = Diplomat::Kv.get(@prefix, { keys: true, recurse: true }, :return) + unused_old_keys.merge(keys) if keys != "" ops = [] num_items = 0 @@ -47,12 +47,12 @@ def init_internal(all_data) end # Now delete any previously existing items whose keys were not in the current data - unused_old_keys.each do |tuple| + unused_old_keys.each do |key| ops.push({ 'KV' => { 'Verb' => 'delete', 'Key' => key } }) end # Now set the special key that we check in initialized_internal? - ops.push({ 'KV' => { 'Verb' => 'set', 'Key' => key, 'Value' => '' } }) + ops.push({ 'KV' => { 'Verb' => 'set', 'Key' => inited_key, 'Value' => '' } }) ConsulUtil.batch_operations(ops) @@ -60,56 +60,76 @@ def init_internal(all_data) end def get_internal(kind, key) - - resp = get_item_by_keys(namespace_for_kind(kind), key) - unmarshal_item(resp.item) + value = Diplomat::Kv.get(item_key(kind, key), {}, :return) # :return means "don't throw an error if not found" + (value.nil? || value == "") ? nil : JSON.parse(value, symbolize_names: true) end def get_all_internal(kind) items_out = {} - + results = Diplomat::Kv.get(kind_key(kind), { recurse: true }, :return) + (results == "" ? [] : results).each do |result| + value = result[:value] + if !value.nil? + item = JSON.parse(value, symbolize_names: true) + items_out[item[:key].to_sym] = item + end + end items_out end def upsert_internal(kind, new_item) - + key = item_key(kind, new_item[:key]) + json = new_item.to_json + + # We will potentially keep retrying indefinitely until someone's write succeeds + while true + old_value = Diplomat::Kv.get(key, { decode_values: true }, :return) + if old_value.nil? || old_value == "" + mod_index = 0 + else + puts("old_value = #{old_value}") + old_item = JSON.parse(old_value[0]["Value"], symbolize_names: true) + # Check whether the item is stale. If so, don't do the update (and return the existing item to + # FeatureStoreWrapper so it can be cached) + if old_item[:version] >= new_item[:version] + return old_item + end + mod_index = old_value[0]["ModifyIndex"] + end + + # Otherwise, try to write. We will do a compare-and-set operation, so the write will only succeed if + # the key's ModifyIndex is still equal to the previous value. If the previous ModifyIndex was zero, + # it means the key did not previously exist and the write will only succeed if it still doesn't exist. + success = Diplomat::Kv.put(key, json, cas: mod_index) + return new_item if success + + # If we failed, retry the whole shebang + @logger.debug { "Concurrent modification detected, retrying" } + end end def initialized_internal? - + value = Diplomat::Kv.get(inited_key, {}, :return) + !value.nil? && value != "" end def stop - # There's no way to close the Consul client + # There's no Consul client instance to dispose of end private def item_key(kind, key) - kind_key(kind) + '/' + key + kind_key(kind) + key.to_s end def kind_key(kind) - @prefix + kind[:namespace] + @prefix + kind[:namespace] + '/' end def inited_key @prefix + '$inited' end - - def marshal_item(kind, item) - make_keys_hash(namespace_for_kind(kind), item[:key]).merge({ - VERSION_ATTRIBUTE => item[:version], - ITEM_JSON_ATTRIBUTE => item.to_json - }) - end - - def unmarshal_item(item) - return nil if item.nil? || item.length == 0 - json_attr = item[ITEM_JSON_ATTRIBUTE] - raise RuntimeError.new("DynamoDB map did not contain expected item string") if json_attr.nil? 
- JSON.parse(json_attr, symbolize_names: true) - end end class ConsulUtil @@ -117,10 +137,10 @@ class ConsulUtil # Submits as many transactions as necessary to submit all of the given operations. # The ops array is consumed. # - def self.batch_write_requests(ops) - batch_size = 64 # Consul can only do this many at a time + def self.batch_operations(ops) + batch_size = 64 # Consul can only do this many at a time while true - chunk = requests.shift(batch_size) + chunk = ops.shift(batch_size) break if chunk.empty? Diplomat::Kv.txn(chunk) end From 21c79fe007ca47b2ff6b5fcc1bf1068a4baa1517 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 15 Jan 2019 13:47:20 -0800 Subject: [PATCH 094/182] minor doc fixes --- README.md | 8 +++++--- lib/ldclient-rb/integrations/consul.rb | 2 +- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 43819554..a194443f 100644 --- a/README.md +++ b/README.md @@ -82,7 +82,8 @@ Note that this gem will automatically switch to using the Rails logger it is det HTTPS proxy ------------- +----------- + The Ruby SDK uses Faraday and Socketry to handle its network traffic. Both of these provide built-in support for the use of an HTTPS proxy. If the HTTPS_PROXY environment variable is present then the SDK will proxy all network requests through the URL provided. How to set the HTTPS_PROXY environment variable on Mac/Linux systems: @@ -124,10 +125,11 @@ end Database integrations --------------------- -Feature flag data can be kept in a persistent store using Redis or DynamoDB. These adapters are implemented in the `LaunchDarkly::Integrations::Redis` and `LaunchDarkly::Integrations::DynamoDB` modules; to use them, call the `new_feature_store` method in the module, and put the returned object in the `feature_store` property of your client configuration. See the [source code](https://github.com/launchdarkly/ruby-client-private/tree/master/lib/ldclient-rb/integrations) and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store) for more information. +Feature flag data can be kept in a persistent store using Redis, DynamoDB, or Consul. These adapters are implemented in the `LaunchDarkly::Integrations::Redis`, `LaunchDarkly::Integrations::DynamoDB`, and `LaunchDarkly::Integrations::Consul` modules; to use them, call the `new_feature_store` method in the module, and put the returned object in the `feature_store` property of your client configuration. See the [API documentation](https://www.rubydoc.info/gems/ldclient-rb/LaunchDarkly/Integrations) and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store) for more information. Using flag data from a file --------------------------- + For testing purposes, the SDK can be made to read feature flag state from a file or files instead of connecting to LaunchDarkly. See [`file_data_source.rb`](https://github.com/launchdarkly/ruby-client/blob/master/lib/ldclient-rb/file_data_source.rb) for more details. Learn more @@ -146,7 +148,7 @@ Contributing See [Contributing](https://github.com/launchdarkly/ruby-client/blob/master/CONTRIBUTING.md) About LaunchDarkly ------------ +------------------ * LaunchDarkly is a continuous delivery platform that provides feature flags as a service and allows developers to iterate quickly and safely. We allow you to easily flag your features and manage them from the LaunchDarkly dashboard. 
With LaunchDarkly, you can: * Roll out a new feature to a subset of your users (like a group of users who opt-in to a beta tester group), gathering feedback and bug reports from real-world use cases. diff --git a/lib/ldclient-rb/integrations/consul.rb b/lib/ldclient-rb/integrations/consul.rb index 7450d3b9..8f5d1f09 100644 --- a/lib/ldclient-rb/integrations/consul.rb +++ b/lib/ldclient-rb/integrations/consul.rb @@ -21,7 +21,7 @@ def self.default_prefix # # @param opts [Hash] the configuration options # @option opts [Hash] :consul_config an instance of `Diplomat::Configuration` to replace the default - # Consul client configuration + # Consul client configuration (note that this is exactly the same as modifying `Diplomat.configuration`) # @option opts [String] :prefix namespace prefix to add to all keys used by LaunchDarkly # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no local caching From 7ea110fd0662cc835fb3fd007591806765763740 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 15 Jan 2019 13:53:03 -0800 Subject: [PATCH 095/182] rm debugging --- lib/ldclient-rb/impl/integrations/consul_impl.rb | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/ldclient-rb/impl/integrations/consul_impl.rb b/lib/ldclient-rb/impl/integrations/consul_impl.rb index 5044f33c..82a4fec9 100644 --- a/lib/ldclient-rb/impl/integrations/consul_impl.rb +++ b/lib/ldclient-rb/impl/integrations/consul_impl.rb @@ -87,7 +87,6 @@ def upsert_internal(kind, new_item) if old_value.nil? || old_value == "" mod_index = 0 else - puts("old_value = #{old_value}") old_item = JSON.parse(old_value[0]["Value"], symbolize_names: true) # Check whether the item is stale. If so, don't do the update (and return the existing item to # FeatureStoreWrapper so it can be cached) From 513618735575da8e177ccb871d835f9adaadefce Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 15 Jan 2019 14:47:12 -0800 Subject: [PATCH 096/182] fix initialized check --- lib/ldclient-rb/impl/integrations/consul_impl.rb | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/lib/ldclient-rb/impl/integrations/consul_impl.rb b/lib/ldclient-rb/impl/integrations/consul_impl.rb index 82a4fec9..4082378f 100644 --- a/lib/ldclient-rb/impl/integrations/consul_impl.rb +++ b/lib/ldclient-rb/impl/integrations/consul_impl.rb @@ -108,8 +108,14 @@ def upsert_internal(kind, new_item) end def initialized_internal? - value = Diplomat::Kv.get(inited_key, {}, :return) - !value.nil? && value != "" + # Unfortunately we need to use exceptions here, instead of the :return parameter, because with + # :return there's no way to distinguish between a missing value and an empty string. 
+ begin + Diplomat::Kv.get(inited_key, {}) + true + rescue Diplomat::KeyNotFound + false + end end def stop From 46ebc1f4a826a1dc13a8140c8b6cd3eceb09db6f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 15 Jan 2019 15:38:19 -0800 Subject: [PATCH 097/182] improve feature store tests + minor Redis fixes --- .../impl/integrations/redis_impl.rb | 17 +- spec/feature_store_spec_base.rb | 232 ++++++++++++------ spec/in_memory_feature_store_spec.rb | 2 +- .../integrations/consul_feature_store_spec.rb | 16 +- .../dynamodb_feature_store_spec.rb | 33 ++- spec/redis_feature_store_spec.rb | 17 +- 6 files changed, 223 insertions(+), 94 deletions(-) diff --git a/lib/ldclient-rb/impl/integrations/redis_impl.rb b/lib/ldclient-rb/impl/integrations/redis_impl.rb index 497b01c5..107340f8 100644 --- a/lib/ldclient-rb/impl/integrations/redis_impl.rb +++ b/lib/ldclient-rb/impl/integrations/redis_impl.rb @@ -48,14 +48,15 @@ def initialize(opts) def init_internal(all_data) count = 0 with_connection do |redis| - all_data.each do |kind, items| - redis.multi do |multi| + redis.multi do |multi| + all_data.each do |kind, items| multi.del(items_key(kind)) count = count + items.count - items.each { |key, item| - redis.hset(items_key(kind), key, item.to_json) - } + items.each do |key, item| + multi.hset(items_key(kind), key, item.to_json) + end end + multi.set(inited_key, inited_key) end end @logger.info { "RedisFeatureStore: initialized with #{count} items" } @@ -112,7 +113,7 @@ def upsert_internal(kind, new_item) end def initialized_internal? - with_connection { |redis| redis.exists(items_key(FEATURES)) } + with_connection { |redis| redis.exists(inited_key) } end def stop @@ -135,6 +136,10 @@ def cache_key(kind, key) kind[:namespace] + ":" + key.to_s end + def inited_key + @prefix + ":$inited" + end + def with_connection @pool.with { |redis| yield(redis) } end diff --git a/spec/feature_store_spec_base.rb b/spec/feature_store_spec_base.rb index 0e0f1ca9..8689577f 100644 --- a/spec/feature_store_spec_base.rb +++ b/spec/feature_store_spec_base.rb @@ -1,112 +1,200 @@ require "spec_helper" -shared_examples "feature_store" do |create_store_method| +shared_examples "feature_store" do |create_store_method, clear_data_method| - let(:feature0) { + # Rather than testing with feature flag or segment data, we'll use this fake data kind + # to make it clear that feature stores need to be able to handle arbitrary data. 
+ let(:things_kind) { { namespace: "things" } } + + let(:key1) { "thing1" } + let(:thing1) { { - key: "test-feature-flag", + key: key1, + name: "Thing 1", version: 11, - on: true, - prerequisites: [], - salt: "718ea30a918a4eba8734b57ab1a93227", - sel: "fe1244e5378c4f99976c9634e33667c6", - targets: [ - { - values: [ "alice" ], - variation: 0 - }, - { - values: [ "bob" ], - variation: 1 - } - ], - rules: [], - fallthrough: { variation: 0 }, - offVariation: 1, - variations: [ true, false ], deleted: false } } - let(:key0) { feature0[:key].to_sym } + let(:unused_key) { "no" } + + let(:create_store) { create_store_method } # just to avoid a scope issue + let(:clear_data) { clear_data_method } + + def with_store(opts = {}) + s = create_store.call(opts) + begin + yield s + ensure + s.stop + end + end - let!(:store) do - s = create_store_method.call() - s.init(LaunchDarkly::FEATURES => { key0 => feature0 }) - s + def with_inited_store(things) + things_hash = {} + things.each { |thing| things_hash[thing[:key].to_sym] = thing } + + with_store do |s| + s.init({ things_kind => things_hash }) + yield s + end end def new_version_plus(f, deltaVersion, attrs = {}) - f1 = f.clone - f1[:version] = f[:version] + deltaVersion - f1.update(attrs) - f1 + f.clone.merge({ version: f[:version] + deltaVersion }).merge(attrs) end + before(:each) do + clear_data.call if !clear_data.nil? + end - it "is initialized" do - expect(store.initialized?).to eq true + # This block of tests is only run if the clear_data method is defined, meaning that this is a persistent store + # that operates on a database that can be shared with other store instances (as opposed to the in-memory store, + # which has its own private storage). + if !clear_data_method.nil? + it "is not initialized by default" do + with_store do |store| + expect(store.initialized?).to eq false + end + end + + it "can detect if another instance has initialized the store" do + with_store do |store1| + store1.init({}) + with_store do |store2| + expect(store2.initialized?).to eq true + end + end + end + + it "can read data written by another instance" do + with_store do |store1| + store1.init({ things_kind => { key1.to_sym => thing1 } }) + with_store do |store2| + expect(store2.get(things_kind, key1)).to eq thing1 + end + end + end + + it "is independent from other stores with different prefixes" do + with_store({ prefix: "a" }) do |store_a| + store_a.init({ things_kind => { key1.to_sym => thing1 } }) + with_store({ prefix: "b" }) do |store_b| + store_b.init({ things_kind => {} }) + end + with_store({ prefix: "b" }) do |store_b1| # this ensures we're not just reading cached data + expect(store_b1.get(things_kind, key1)).to be_nil + expect(store_a.get(things_kind, key1)).to eq thing1 + end + end + end end - it "can get existing feature with symbol key" do - expect(store.get(LaunchDarkly::FEATURES, key0)).to eq feature0 + it "is initialized after calling init" do + with_inited_store([]) do |store| + expect(store.initialized?).to eq true + end end - it "can get existing feature with string key" do - expect(store.get(LaunchDarkly::FEATURES, key0.to_s)).to eq feature0 + it "can get existing item with symbol key" do + with_inited_store([ thing1 ]) do |store| + expect(store.get(things_kind, key1.to_sym)).to eq thing1 + end end - it "gets nil for nonexisting feature" do - expect(store.get(LaunchDarkly::FEATURES, 'nope')).to be_nil + it "can get existing item with string key" do + with_inited_store([ thing1 ]) do |store| + expect(store.get(things_kind, key1.to_s)).to eq 
thing1 + end end - it "can get all features" do - feature1 = feature0.clone - feature1[:key] = "test-feature-flag1" - feature1[:version] = 5 - feature1[:on] = false - store.upsert(LaunchDarkly::FEATURES, feature1) - expect(store.all(LaunchDarkly::FEATURES)).to eq ({ key0 => feature0, :"test-feature-flag1" => feature1 }) + it "gets nil for nonexisting item" do + with_inited_store([ thing1 ]) do |store| + expect(store.get(things_kind, unused_key)).to be_nil + end + end + + it "returns nil for deleted item" do + deleted_thing = thing1.clone.merge({ deleted: true }) + with_inited_store([ deleted_thing ]) do |store| + expect(store.get(things_kind, key1)).to be_nil + end + end + + it "can get all items" do + key2 = "thing2" + thing2 = { + key: key2, + name: "Thing 2", + version: 22, + deleted: false + } + with_inited_store([ thing1, thing2 ]) do |store| + expect(store.all(things_kind)).to eq ({ key1.to_sym => thing1, key2.to_sym => thing2 }) + end + end + + it "filters out deleted items when getting all" do + key2 = "thing2" + thing2 = { + key: key2, + name: "Thing 2", + version: 22, + deleted: true + } + with_inited_store([ thing1, thing2 ]) do |store| + expect(store.all(things_kind)).to eq ({ key1.to_sym => thing1 }) + end end - it "can add new feature" do - feature1 = feature0.clone - feature1[:key] = "test-feature-flag1" - feature1[:version] = 5 - feature1[:on] = false - store.upsert(LaunchDarkly::FEATURES, feature1) - expect(store.get(LaunchDarkly::FEATURES, :"test-feature-flag1")).to eq feature1 + it "can add new item" do + with_inited_store([]) do |store| + store.upsert(things_kind, thing1) + expect(store.get(things_kind, key1)).to eq thing1 + end end - it "can update feature with newer version" do - f1 = new_version_plus(feature0, 1, { on: !feature0[:on] }) - store.upsert(LaunchDarkly::FEATURES, f1) - expect(store.get(LaunchDarkly::FEATURES, key0)).to eq f1 + it "can update item with newer version" do + with_inited_store([ thing1 ]) do |store| + thing1_mod = new_version_plus(thing1, 1, { name: thing1[:name] + ' updated' }) + store.upsert(things_kind, thing1_mod) + expect(store.get(things_kind, key1)).to eq thing1_mod + end end - it "cannot update feature with same version" do - f1 = new_version_plus(feature0, 0, { on: !feature0[:on] }) - store.upsert(LaunchDarkly::FEATURES, f1) - expect(store.get(LaunchDarkly::FEATURES, key0)).to eq feature0 + it "cannot update item with same version" do + with_inited_store([ thing1 ]) do |store| + thing1_mod = thing1.clone.merge({ name: thing1[:name] + ' updated' }) + store.upsert(things_kind, thing1_mod) + expect(store.get(things_kind, key1)).to eq thing1 + end end it "cannot update feature with older version" do - f1 = new_version_plus(feature0, -1, { on: !feature0[:on] }) - store.upsert(LaunchDarkly::FEATURES, f1) - expect(store.get(LaunchDarkly::FEATURES, key0)).to eq feature0 + with_inited_store([ thing1 ]) do |store| + thing1_mod = new_version_plus(thing1, -1, { name: thing1[:name] + ' updated' }) + store.upsert(things_kind, thing1_mod) + expect(store.get(things_kind, key1)).to eq thing1 + end end - it "can delete feature with newer version" do - store.delete(LaunchDarkly::FEATURES, key0, feature0[:version] + 1) - expect(store.get(LaunchDarkly::FEATURES, key0)).to be_nil + it "can delete item with newer version" do + with_inited_store([ thing1 ]) do |store| + store.delete(things_kind, key1, thing1[:version] + 1) + expect(store.get(things_kind, key1)).to be_nil + end end - it "cannot delete feature with same version" do - 
store.delete(LaunchDarkly::FEATURES, key0, feature0[:version]) - expect(store.get(LaunchDarkly::FEATURES, key0)).to eq feature0 + it "cannot delete item with same version" do + with_inited_store([ thing1 ]) do |store| + store.delete(things_kind, key1, thing1[:version]) + expect(store.get(things_kind, key1)).to eq thing1 + end end - it "cannot delete feature with older version" do - store.delete(LaunchDarkly::FEATURES, key0, feature0[:version] - 1) - expect(store.get(LaunchDarkly::FEATURES, key0)).to eq feature0 + it "cannot delete item with older version" do + with_inited_store([ thing1 ]) do |store| + store.delete(things_kind, key1, thing1[:version] - 1) + expect(store.get(things_kind, key1)).to eq thing1 + end end end diff --git a/spec/in_memory_feature_store_spec.rb b/spec/in_memory_feature_store_spec.rb index a1673bbc..c403fc69 100644 --- a/spec/in_memory_feature_store_spec.rb +++ b/spec/in_memory_feature_store_spec.rb @@ -1,7 +1,7 @@ require "feature_store_spec_base" require "spec_helper" -def create_in_memory_store() +def create_in_memory_store(opts = {}) LaunchDarkly::InMemoryFeatureStore.new end diff --git a/spec/integrations/consul_feature_store_spec.rb b/spec/integrations/consul_feature_store_spec.rb index 1aa6f919..13767686 100644 --- a/spec/integrations/consul_feature_store_spec.rb +++ b/spec/integrations/consul_feature_store_spec.rb @@ -1,5 +1,5 @@ require "feature_store_spec_base" -#require "diplomat" +require "diplomat" require "spec_helper" @@ -7,19 +7,23 @@ $null_log = ::Logger.new($stdout) $null_log.level = ::Logger::FATAL -$base_opts = { +$consul_base_opts = { prefix: $my_prefix, logger: $null_log } def create_consul_store(opts = {}) LaunchDarkly::Integrations::Consul::new_feature_store( - opts.merge($base_opts).merge({ expiration: 60 })) + $consul_base_opts.merge(opts).merge({ expiration: 60 })) end def create_consul_store_uncached(opts = {}) LaunchDarkly::Integrations::Consul::new_feature_store( - opts.merge($base_opts).merge({ expiration: 0 })) + $consul_base_opts.merge(opts).merge({ expiration: 0 })) +end + +def clear_all_data + Diplomat::Kv.delete($my_prefix + '/', recurse: true) end @@ -28,10 +32,10 @@ def create_consul_store_uncached(opts = {}) # These tests will all fail if there isn't a local Consul instance running. 
context "with local cache" do - include_examples "feature_store", method(:create_consul_store) + include_examples "feature_store", method(:create_consul_store), method(:clear_all_data) end context "without local cache" do - include_examples "feature_store", method(:create_consul_store_uncached) + include_examples "feature_store", method(:create_consul_store_uncached), method(:clear_all_data) end end diff --git a/spec/integrations/dynamodb_feature_store_spec.rb b/spec/integrations/dynamodb_feature_store_spec.rb index 38104fb3..4add3d53 100644 --- a/spec/integrations/dynamodb_feature_store_spec.rb +++ b/spec/integrations/dynamodb_feature_store_spec.rb @@ -15,7 +15,7 @@ endpoint: $endpoint } -$base_opts = { +$ddb_base_opts = { dynamodb_opts: $dynamodb_opts, prefix: $my_prefix, logger: $null_log @@ -23,12 +23,35 @@ def create_dynamodb_store(opts = {}) LaunchDarkly::Integrations::DynamoDB::new_feature_store($table_name, - opts.merge($base_opts).merge({ expiration: 60 })) + $ddb_base_opts.merge(opts).merge({ expiration: 60 })) end def create_dynamodb_store_uncached(opts = {}) LaunchDarkly::Integrations::DynamoDB::new_feature_store($table_name, - opts.merge($base_opts).merge({ expiration: 0 })) + $ddb_base_opts.merge(opts).merge({ expiration: 0 })) +end + +def clear_all_data + client = create_test_client + items_to_delete = [] + req = { + table_name: $table_name, + projection_expression: '#namespace, #key', + expression_attribute_names: { + '#namespace' => 'namespace', + '#key' => 'key' + } + } + while true + resp = client.scan(req) + items_to_delete = items_to_delete + resp.items + break if resp.last_evaluated_key.nil? || resp.last_evaluated_key.length == 0 + req.exclusive_start_key = resp.last_evaluated_key + end + requests = items_to_delete.map do |item| + { delete_request: { key: item } } + end + LaunchDarkly::Impl::Integrations::DynamoDB::DynamoDBUtil.batch_write_requests(client, $table_name, requests) end def create_table_if_necessary @@ -72,10 +95,10 @@ def create_test_client create_table_if_necessary context "with local cache" do - include_examples "feature_store", method(:create_dynamodb_store) + include_examples "feature_store", method(:create_dynamodb_store), method(:clear_all_data) end context "without local cache" do - include_examples "feature_store", method(:create_dynamodb_store_uncached) + include_examples "feature_store", method(:create_dynamodb_store_uncached), method(:clear_all_data) end end diff --git a/spec/redis_feature_store_spec.rb b/spec/redis_feature_store_spec.rb index d5ccfb65..3da25f4f 100644 --- a/spec/redis_feature_store_spec.rb +++ b/spec/redis_feature_store_spec.rb @@ -9,13 +9,22 @@ $null_log = ::Logger.new($stdout) $null_log.level = ::Logger::FATAL +$base_opts = { + prefix: $my_prefix, + logger: $null_log +} def create_redis_store(opts = {}) - LaunchDarkly::RedisFeatureStore.new(opts.merge({ prefix: $my_prefix, logger: $null_log, expiration: 60 })) + LaunchDarkly::RedisFeatureStore.new($base_opts.merge(opts).merge({ expiration: 60 })) end def create_redis_store_uncached(opts = {}) - LaunchDarkly::RedisFeatureStore.new(opts.merge({ prefix: $my_prefix, logger: $null_log, expiration: 0 })) + LaunchDarkly::RedisFeatureStore.new($base_opts.merge(opts).merge({ expiration: 0 })) +end + +def clear_all_data + client = Redis.new + client.flushdb end @@ -25,11 +34,11 @@ def create_redis_store_uncached(opts = {}) # These tests will all fail if there isn't a Redis instance running on the default port. 
context "real Redis with local cache" do - include_examples "feature_store", method(:create_redis_store) + include_examples "feature_store", method(:create_redis_store), method(:clear_all_data) end context "real Redis without local cache" do - include_examples "feature_store", method(:create_redis_store_uncached) + include_examples "feature_store", method(:create_redis_store_uncached), method(:clear_all_data) end def make_concurrent_modifier_test_hook(other_client, flag, start_version, end_version) From 97ee2daaa6e6cedb1e74adeab7785dd2759eb68e Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 16 Jan 2019 12:21:59 -0800 Subject: [PATCH 098/182] test fix: we can't use Unicode in flag keys anyway --- spec/feature_store_spec_base.rb | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/spec/feature_store_spec_base.rb b/spec/feature_store_spec_base.rb index 3580a67f..d004cb54 100644 --- a/spec/feature_store_spec_base.rb +++ b/spec/feature_store_spec_base.rb @@ -112,7 +112,8 @@ def new_version_plus(f, deltaVersion, attrs = {}) it "stores Unicode data correctly" do flag = { - key: "tęst-feåtūre-flæg😺", + key: "my-fancy-flag", + name: "Tęst Feåtūre Flæg😺", version: 1, deleted: false } From 21a505e366505365458b98c1bd8fd8875183bf80 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 16 Jan 2019 13:56:58 -0800 Subject: [PATCH 099/182] test fix --- spec/feature_store_spec_base.rb | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/spec/feature_store_spec_base.rb b/spec/feature_store_spec_base.rb index b7d15cc5..2d06f0ff 100644 --- a/spec/feature_store_spec_base.rb +++ b/spec/feature_store_spec_base.rb @@ -205,7 +205,9 @@ def new_version_plus(f, deltaVersion, attrs = {}) version: 1, deleted: false } - store.upsert(LaunchDarkly::FEATURES, flag) - expect(store.get(LaunchDarkly::FEATURES, flag[:key])).to eq flag + with_inited_store([]) do |store| + store.upsert(LaunchDarkly::FEATURES, flag) + expect(store.get(LaunchDarkly::FEATURES, flag[:key])).to eq flag + end end end From 784eb07fcea16cf79e36def97ebf4967926b0f05 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 16 Jan 2019 18:37:31 -0800 Subject: [PATCH 100/182] misc prerelease fixes --- lib/ldclient-rb/impl/integrations/consul_impl.rb | 1 + lib/ldclient-rb/integrations/consul.rb | 1 + lib/ldclient-rb/stream.rb | 2 +- 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/lib/ldclient-rb/impl/integrations/consul_impl.rb b/lib/ldclient-rb/impl/integrations/consul_impl.rb index 4082378f..10c16dbc 100644 --- a/lib/ldclient-rb/impl/integrations/consul_impl.rb +++ b/lib/ldclient-rb/impl/integrations/consul_impl.rb @@ -23,6 +23,7 @@ def initialize(opts) @prefix = (opts[:prefix] || LaunchDarkly::Integrations::Consul.default_prefix) + '/' @logger = opts[:logger] || Config.default_logger Diplomat.configuration = opts[:consul_config] if !opts[:consul_config].nil? + Diplomat.configuration.url = opts[:url] if !opts[:url].nil? 
@logger.info("ConsulFeatureStore: using Consul host at #{Diplomat.configuration.url}") end diff --git a/lib/ldclient-rb/integrations/consul.rb b/lib/ldclient-rb/integrations/consul.rb index 8f5d1f09..2d46d813 100644 --- a/lib/ldclient-rb/integrations/consul.rb +++ b/lib/ldclient-rb/integrations/consul.rb @@ -22,6 +22,7 @@ def self.default_prefix # @param opts [Hash] the configuration options # @option opts [Hash] :consul_config an instance of `Diplomat::Configuration` to replace the default # Consul client configuration (note that this is exactly the same as modifying `Diplomat.configuration`) + # @option opts [String] :url shortcut for setting the `url` property of the Consul client configuration # @option opts [String] :prefix namespace prefix to add to all keys used by LaunchDarkly # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no local caching diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index e4f1b3bd..094a37b2 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -58,7 +58,7 @@ def start conn.on_event { |event| process_message(event) } conn.on_error { |err| case err - when SSE::Errors::HTTPError + when SSE::Errors::HTTPStatusError status = err.status message = Util.http_error_message(status, "streaming connection", "will retry") @config.logger.error { "[LDClient] #{message}" } From e9b06c60c7e46d45487d823325e86804eb4c32fe Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 17 Jan 2019 17:15:54 -0800 Subject: [PATCH 101/182] fix doc comments --- lib/ldclient-rb/integrations/consul.rb | 2 +- lib/ldclient-rb/integrations/dynamodb.rb | 2 +- lib/ldclient-rb/integrations/redis.rb | 2 +- lib/ldclient-rb/integrations/util/store_wrapper.rb | 2 +- lib/ldclient-rb/redis_store.rb | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/lib/ldclient-rb/integrations/consul.rb b/lib/ldclient-rb/integrations/consul.rb index 2d46d813..4f32d5fd 100644 --- a/lib/ldclient-rb/integrations/consul.rb +++ b/lib/ldclient-rb/integrations/consul.rb @@ -25,7 +25,7 @@ def self.default_prefix # @option opts [String] :url shortcut for setting the `url` property of the Consul client configuration # @option opts [String] :prefix namespace prefix to add to all keys used by LaunchDarkly # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` - # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no local caching + # @option opts [Integer] :expiration (15) expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object # diff --git a/lib/ldclient-rb/integrations/dynamodb.rb b/lib/ldclient-rb/integrations/dynamodb.rb index ecd87fce..189e118f 100644 --- a/lib/ldclient-rb/integrations/dynamodb.rb +++ b/lib/ldclient-rb/integrations/dynamodb.rb @@ -34,7 +34,7 @@ module DynamoDB # @option opts [Object] :existing_client an already-constructed DynamoDB client for the feature store to use # @option opts [String] :prefix namespace prefix to add to all keys used by LaunchDarkly # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` - # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no 
local caching + # @option opts [Integer] :expiration (15) expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object # diff --git a/lib/ldclient-rb/integrations/redis.rb b/lib/ldclient-rb/integrations/redis.rb index 34509181..7e447657 100644 --- a/lib/ldclient-rb/integrations/redis.rb +++ b/lib/ldclient-rb/integrations/redis.rb @@ -42,7 +42,7 @@ def self.default_prefix # @option opts [String] :prefix (default_prefix) namespace prefix to add to all hash keys used by LaunchDarkly # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` # @option opts [Integer] :max_connections size of the Redis connection pool - # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no local caching + # @option opts [Integer] :expiration (15) expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache # @option opts [Object] :pool custom connection pool, if desired # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object diff --git a/lib/ldclient-rb/integrations/util/store_wrapper.rb b/lib/ldclient-rb/integrations/util/store_wrapper.rb index eef22d5e..26318d67 100644 --- a/lib/ldclient-rb/integrations/util/store_wrapper.rb +++ b/lib/ldclient-rb/integrations/util/store_wrapper.rb @@ -23,7 +23,7 @@ class CachingStoreWrapper # # @param core [Object] an object that implements the {FeatureStoreCore} methods # @param opts [Hash] a hash that may include cache-related options; all others will be ignored - # @option opts [Float] :expiration_seconds (15) cache TTL; zero means no caching + # @option opts [Float] :expiration (15) cache TTL; zero means no caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache # def initialize(core, opts) diff --git a/lib/ldclient-rb/redis_store.rb b/lib/ldclient-rb/redis_store.rb index 392f5d2e..48632411 100644 --- a/lib/ldclient-rb/redis_store.rb +++ b/lib/ldclient-rb/redis_store.rb @@ -32,7 +32,7 @@ class RedisFeatureStore # @option opts [String] :prefix namespace prefix to add to all hash keys used by LaunchDarkly # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` # @option opts [Integer] :max_connections size of the Redis connection pool - # @option opts [Integer] :expiration_seconds expiration time for the in-memory cache, in seconds; 0 for no local caching + # @option opts [Integer] :expiration expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity maximum number of feature flags (or related objects) to cache locally # @option opts [Object] :pool custom connection pool, if desired # From 6389a2663bf7221ca0948261dadd2c00a72fc8df Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 18 Jan 2019 11:38:23 -0800 Subject: [PATCH 102/182] add YARD config so our docs show up correctly everywhere --- .yardopts | 10 ++++++++++ scripts/gendocs.sh | 5 ++--- 2 files changed, 12 insertions(+), 3 deletions(-) create mode 100644 .yardopts diff --git a/.yardopts b/.yardopts new file mode 100644 index 00000000..559b7ab5 --- /dev/null +++ b/.yardopts @@ -0,0 +1,10 @@ +--no-private +--markup markdown +--markup-provider redcarpet +--embed-mixins +lib/*.rb +lib/**/*.rb +lib/**/**/*.rb +lib/**/**/**/*.rb +- +README.md 
diff --git a/scripts/gendocs.sh b/scripts/gendocs.sh index 1e545955..96df177f 100755 --- a/scripts/gendocs.sh +++ b/scripts/gendocs.sh @@ -6,7 +6,6 @@ gem install --conservative yard gem install --conservative redcarpet # provides Markdown formatting -# yard doesn't seem to do recursive directories, even though Ruby's Dir.glob supposedly recurses for "**" -PATHS="lib/*.rb lib/**/*.rb lib/**/**/*.rb lib/**/**/**/*.rb" +rm -rf doc/* -yard doc --no-private --markup markdown --markup-provider redcarpet --embed-mixins $PATHS - README.md +yard doc From 3406a03430efbd839659aee23d334d48b126da03 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 18 Jan 2019 11:53:38 -0800 Subject: [PATCH 103/182] don't need markup-provider option --- .yardopts | 1 - 1 file changed, 1 deletion(-) diff --git a/.yardopts b/.yardopts index 559b7ab5..5388ac50 100644 --- a/.yardopts +++ b/.yardopts @@ -1,6 +1,5 @@ --no-private --markup markdown ---markup-provider redcarpet --embed-mixins lib/*.rb lib/**/*.rb From d38973acf1dbdda8da4ac529e472ec434a14742f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 29 Jan 2019 10:57:43 -0800 Subject: [PATCH 104/182] rm obsolete proxy param --- lib/ldclient-rb/stream.rb | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index 094a37b2..ddb7f669 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -50,7 +50,6 @@ def start } opts = { headers: headers, - proxy: @config.proxy, read_timeout: READ_TIMEOUT_SECONDS, logger: @config.logger } From def22fc67e8b918cd7cef3006f0d896a7ed4bc68 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 30 Jan 2019 21:38:49 -0800 Subject: [PATCH 105/182] remove net-http-persistent --- Gemfile.lock | 12 +++--------- ldclient-rb.gemspec | 1 - lib/ldclient-rb/events.rb | 17 +++++++++++------ lib/ldclient-rb/polling.rb | 2 +- lib/ldclient-rb/requestor.rb | 15 +++++++++------ lib/ldclient-rb/util.rb | 10 ++++++++++ spec/events_spec.rb | 15 +++++++++++---- 7 files changed, 45 insertions(+), 27 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index 2e96a86a..21a65cc1 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,13 +1,10 @@ PATH remote: . 
specs: - ldclient-rb (5.4.3) + ldclient-rb (5.5.2) concurrent-ruby (~> 1.0) - faraday (>= 0.9, < 2) - faraday-http-cache (>= 1.3.0, < 3) json (>= 1.8, < 3) ld-eventsource (~> 1.0) - net-http-persistent (>= 2.9, < 4.0) semantic (~> 1.6) GEM @@ -35,11 +32,10 @@ GEM docile (1.1.5) faraday (0.15.4) multipart-post (>= 1.2, < 3) - faraday-http-cache (2.0.0) - faraday (~> 0.8) ffi (1.9.25) ffi (1.9.25-java) - hitimes (1.3.0) + hitimes (1.3.1) + hitimes (1.3.1-java) http_tools (0.4.5) jmespath (1.4.0) json (1.8.6) @@ -53,8 +49,6 @@ GEM rb-inotify (~> 0.9, >= 0.9.7) ruby_dep (~> 1.2) multipart-post (2.0.0) - net-http-persistent (3.0.0) - connection_pool (~> 2.2) rake (10.5.0) rb-fsevent (0.10.3) rb-inotify (0.9.10) diff --git a/ldclient-rb.gemspec b/ldclient-rb.gemspec index 810987a4..9fb4daa0 100644 --- a/ldclient-rb.gemspec +++ b/ldclient-rb.gemspec @@ -35,7 +35,6 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "json", [">= 1.8", "< 3"] spec.add_runtime_dependency "semantic", "~> 1.6" - spec.add_runtime_dependency "net-http-persistent", [">= 2.9", "< 4.0"] spec.add_runtime_dependency "concurrent-ruby", "~> 1.0" spec.add_runtime_dependency "ld-eventsource", '~> 1.0' end diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index 02885904..72c82a90 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -1,7 +1,6 @@ require "concurrent" require "concurrent/atomics" require "concurrent/executors" -require "net/http/persistent" require "thread" require "time" @@ -116,9 +115,10 @@ def initialize(queue, sdk_key, config, client) @sdk_key = sdk_key @config = config - @client = client ? client : Net::HTTP::Persistent.new do |c| - c.open_timeout = @config.connect_timeout - c.read_timeout = @config.read_timeout + if client + @client = client + else + @client = Util.new_http_client(@config.events_uri, @config) end @user_keys = SimpleLRUCacheSet.new(config.user_keys_capacity) @@ -167,7 +167,10 @@ def main_loop(queue, buffer, flush_workers) def do_shutdown(flush_workers) flush_workers.shutdown flush_workers.wait_for_termination - @client.shutdown + begin + @client.finish + rescue + end end def synchronize_for_testing(flush_workers) @@ -322,6 +325,7 @@ def run(sdk_key, config, client, payload, formatter) sleep(1) end begin + client.start if !client.started? config.logger.debug { "[LDClient] sending #{events_out.length} events: #{body}" } uri = URI(config.events_uri + "/bulk") req = Net::HTTP::Post.new(uri) @@ -330,7 +334,8 @@ def run(sdk_key, config, client, payload, formatter) req["Authorization"] = sdk_key req["User-Agent"] = "RubyClient/" + LaunchDarkly::VERSION req["X-LaunchDarkly-Event-Schema"] = CURRENT_SCHEMA_VERSION.to_s - res = client.request(uri, req) + req["Connection"] = "keep-alive" + res = client.request(req) rescue StandardError => exn config.logger.warn { "[LDClient] Error flushing events: #{exn.inspect}." } next diff --git a/lib/ldclient-rb/polling.rb b/lib/ldclient-rb/polling.rb index 17ff7c12..da0427dc 100644 --- a/lib/ldclient-rb/polling.rb +++ b/lib/ldclient-rb/polling.rb @@ -26,7 +26,7 @@ def start def stop if @stopped.make_true - if @worker && @worker.alive? + if @worker && @worker.alive? 
&& @worker != Thread.current @worker.run # causes the thread to wake up if it's currently in a sleep @worker.join end diff --git a/lib/ldclient-rb/requestor.rb b/lib/ldclient-rb/requestor.rb index 5f48d7ff..f7174787 100644 --- a/lib/ldclient-rb/requestor.rb +++ b/lib/ldclient-rb/requestor.rb @@ -1,6 +1,6 @@ require "concurrent/atomics" require "json" -require "net/http/persistent" +require "uri" module LaunchDarkly # @private @@ -22,9 +22,7 @@ class Requestor def initialize(sdk_key, config) @sdk_key = sdk_key @config = config - @client = Net::HTTP::Persistent.new - @client.open_timeout = @config.connect_timeout - @client.read_timeout = @config.read_timeout + @client = Util.new_http_client(@config.base_uri, @config) @cache = @config.cache_store end @@ -41,21 +39,26 @@ def request_all_data() end def stop - @client.shutdown + begin + @client.finish + rescue + end end private def make_request(path) + @client.start if !@client.started? uri = URI(@config.base_uri + path) req = Net::HTTP::Get.new(uri) req["Authorization"] = @sdk_key req["User-Agent"] = "RubyClient/" + LaunchDarkly::VERSION + req["Connection"] = "keep-alive" cached = @cache.read(uri) if !cached.nil? req["If-None-Match"] = cached.etag end - res = @client.request(uri, req) + res = @client.request(req) status = res.code.to_i @config.logger.debug { "[LDClient] Got response from uri: #{uri}\n\tstatus code: #{status}\n\theaders: #{res.to_hash}\n\tbody: #{res.body}" } diff --git a/lib/ldclient-rb/util.rb b/lib/ldclient-rb/util.rb index e303e18a..03849957 100644 --- a/lib/ldclient-rb/util.rb +++ b/lib/ldclient-rb/util.rb @@ -1,7 +1,17 @@ +require "uri" module LaunchDarkly # @private module Util + def self.new_http_client(uri_s, config) + uri = URI(uri_s) + client = Net::HTTP.new(uri.hostname, uri.port) + client.use_ssl = true if uri.scheme == "https" + client.open_timeout = config.connect_timeout + client.read_timeout = config.read_timeout + client + end + def self.log_exception(logger, message, exc) logger.error { "[LDClient] #{message}: #{exc.inspect}" } logger.debug { "[LDClient] Exception trace: #{exc.backtrace}" } diff --git a/spec/events_spec.rb b/spec/events_spec.rb index 86cc67b6..90b91ec9 100644 --- a/spec/events_spec.rb +++ b/spec/events_spec.rb @@ -536,7 +536,7 @@ def reset @status = 200 end - def request(uri, req) + def request(req) @requests.push(req) if @exception raise @exception @@ -549,11 +549,18 @@ def request(uri, req) end end - def get_request - @requests.shift + def start + end + + def started? 
+ false end - def shutdown + def finish + end + + def get_request + @requests.shift end end From f5ef9a4630df1444c6744bc33f0f07dbbdddb7cc Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 30 Jan 2019 22:14:48 -0800 Subject: [PATCH 106/182] fix concurrent-ruby usage that breaks on Windows --- lib/ldclient-rb/events.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index 72c82a90..c45a9da2 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -124,7 +124,7 @@ def initialize(queue, sdk_key, config, client) @user_keys = SimpleLRUCacheSet.new(config.user_keys_capacity) @formatter = EventOutputFormatter.new(config) @disabled = Concurrent::AtomicBoolean.new(false) - @last_known_past_time = Concurrent::AtomicFixnum.new(0) + @last_known_past_time = Concurrent::AtomicReference.new(0) buffer = EventBuffer.new(config.capacity, config.logger) flush_workers = NonBlockingThreadPool.new(MAX_FLUSH_WORKERS) From 9b4d75b99549393f8e12d3f0a498870b1bf52b28 Mon Sep 17 00:00:00 2001 From: Harpo roeder Date: Mon, 4 Feb 2019 12:39:30 -0800 Subject: [PATCH 107/182] add pipeline and clean up with with rm_rf instead of rm --- azure-pipelines.yml | 14 ++++++++++++++ spec/file_data_source_spec.rb | 10 +++++----- 2 files changed, 19 insertions(+), 5 deletions(-) create mode 100644 azure-pipelines.yml diff --git a/azure-pipelines.yml b/azure-pipelines.yml new file mode 100644 index 00000000..1b9cae48 --- /dev/null +++ b/azure-pipelines.yml @@ -0,0 +1,14 @@ +jobs: + - job: build + pool: + vmImage: 'vs2017-win2016' + steps: + - task: PowerShell@2 + inputs: + targetType: inline + script: | + ruby -v + gem install bundler -v 1.17.3 + bundle install + mkdir rspec + bundle exec rspec --exclude-pattern "spec/integrations/*,spec/redis_*" --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index 28a0c06f..c827222d 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -74,7 +74,7 @@ segments: seg1: key: seg1 - include: ["user1"] + include: ["user1"] EOF } @@ -87,7 +87,7 @@ end after do - FileUtils.remove_dir(@tmp_dir) + FileUtils.rm_rf(@tmp_dir) end def make_temp_file(content) @@ -198,10 +198,10 @@ def test_auto_reload(options) event = ds.start expect(event.set?).to eq(true) expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([]) - + sleep(1) IO.write(file, all_properties_json) - + max_time = 10 ok = wait_for_condition(10) { @store.all(LaunchDarkly::SEGMENTS).keys == all_segment_keys } expect(ok).to eq(true), "Waited #{max_time}s after modifying file and it did not reload" @@ -243,7 +243,7 @@ def test_auto_reload(options) client.close end end - + def wait_for_condition(max_time) deadline = Time.now + max_time while Time.now < deadline From eb4ad9703f5c230375b75dd01c3756ce0b8afcb3 Mon Sep 17 00:00:00 2001 From: Harpo roeder Date: Mon, 4 Feb 2019 14:52:52 -0800 Subject: [PATCH 108/182] fix highlight blocks --- README.md | 58 +++++++++++++++++++++++++++---------------------------- 1 file changed, 29 insertions(+), 29 deletions(-) diff --git a/README.md b/README.md index f4dc72b7..df406928 100644 --- a/README.md +++ b/README.md @@ -17,19 +17,19 @@ Quick setup 1. Install the Ruby SDK with `gem` - ```shell +```shell gem install ldclient-rb ``` 2. Require the LaunchDarkly client: - ```ruby +```ruby require 'ldclient-rb' ``` 3. 
Create a new LDClient with your SDK key: - ```ruby +```ruby client = LaunchDarkly::LDClient.new("your_sdk_key") ``` @@ -39,42 +39,42 @@ client = LaunchDarkly::LDClient.new("your_sdk_key") 2. Initialize the launchdarkly client in `config/initializers/launchdarkly.rb`: - ```ruby +```ruby Rails.configuration.ld_client = LaunchDarkly::LDClient.new("your_sdk_key") ``` 3. You may want to include a function in your ApplicationController - ```ruby - def launchdarkly_settings - if current_user.present? - { - key: current_user.id, - anonymous: false, - email: current_user.email, - custom: { groups: current_user.groups.pluck(:name) }, - # Any other fields you may have - # e.g. lastName: current_user.last_name, - } - else - if Rails::VERSION::MAJOR <= 3 - hash_key = request.session_options[:id] - else - hash_key = session.id - end - # session ids should be private to prevent session hijacking - hash_key = Digest::SHA256.base64digest hash_key - { - key: hash_key, - anonymous: true, - } - end +```ruby +def launchdarkly_settings + if current_user.present? + { + key: current_user.id, + anonymous: false, + email: current_user.email, + custom: { groups: current_user.groups.pluck(:name) }, + # Any other fields you may have + # e.g. lastName: current_user.last_name, + } + else + if Rails::VERSION::MAJOR <= 3 + hash_key = request.session_options[:id] + else + hash_key = session.id end + # session ids should be private to prevent session hijacking + hash_key = Digest::SHA256.base64digest hash_key + { + key: hash_key, + anonymous: true, + } + end +end ``` 4. In your controllers, access the client using - ```ruby +```ruby Rails.application.config.ld_client.variation('your.flag.key', launchdarkly_settings, false) ``` From 4aa6272748587ac362bbe098cb1233acce43148b Mon Sep 17 00:00:00 2001 From: hroederld <46500128+hroederld@users.noreply.github.com> Date: Tue, 5 Feb 2019 00:12:38 +0000 Subject: [PATCH 109/182] Hr/azure3 (#103) * Add Consul and Redis services to Windows. 
* Enable Consul and Redis testing
---
 azure-pipelines.yml | 28 +++++++++++++++++++++++++++-
 1 file changed, 27 insertions(+), 1 deletion(-)

diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 1b9cae48..40d39abe 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -4,11 +4,37 @@ jobs:
       vmImage: 'vs2017-win2016'
     steps:
     - task: PowerShell@2
+      displayName: 'Setup Consul'
       inputs:
         targetType: inline
+        workingDirectory: $(System.DefaultWorkingDirectory)
+        script: |
+          iwr -outf consul.zip https://releases.hashicorp.com/consul/1.4.2/consul_1.4.2_windows_amd64.zip
+          mkdir consul
+          Expand-Archive -Path consul.zip -DestinationPath consul
+          cd consul
+          sc.exe create "Consul" binPath="$(System.DefaultWorkingDirectory)/consul/consul.exe agent -dev"
+          sc.exe start "Consul"
+    - task: PowerShell@2
+      displayName: 'Setup Redis'
+      inputs:
+        targetType: inline
+        workingDirectory: $(System.DefaultWorkingDirectory)
+        script: |
+          iwr -outf redis.zip https://github.com/MicrosoftArchive/redis/releases/download/win-3.0.504/Redis-x64-3.0.504.zip
+          mkdir redis
+          Expand-Archive -Path redis.zip -DestinationPath redis
+          cd redis
+          ./redis-server --service-install
+          ./redis-server --service-start
+    - task: PowerShell@2
+      displayName: 'Setup SDK and Test'
+      inputs:
+        targetType: inline
+        workingDirectory: $(System.DefaultWorkingDirectory)
         script: |
           ruby -v
           gem install bundler -v 1.17.3
           bundle install
           mkdir rspec
-          bundle exec rspec --exclude-pattern "spec/integrations/*,spec/redis_*" --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec
+          bundle exec rspec --exclude-pattern "spec/integrations/dynamodb_feature_store_spec.rb," --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec

From fd143d7150d908e1734faa3eccbb3390075a2d38 Mon Sep 17 00:00:00 2001
From: hroederld <46500128+hroederld@users.noreply.github.com>
Date: Tue, 5 Feb 2019 10:40:07 -0800
Subject: [PATCH 110/182] add dynamo (#104)

---
 azure-pipelines.yml | 13 ++++++++++++-
 1 file changed, 12 insertions(+), 1 deletion(-)

diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 40d39abe..3d3fd98a 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -3,6 +3,17 @@ jobs:
     pool:
       vmImage: 'vs2017-win2016'
     steps:
+    - task: PowerShell@2
+      displayName: 'Setup Dynamo'
+      inputs:
+        targetType: inline
+        workingDirectory: $(System.DefaultWorkingDirectory)
+        script: |
+          iwr -outf dynamo.zip https://s3-us-west-2.amazonaws.com/dynamodb-local/dynamodb_local_latest.zip
+          mkdir dynamo
+          Expand-Archive -Path dynamo.zip -DestinationPath dynamo
+          cd dynamo
+          javaw -D"java.library.path=./DynamoDBLocal_lib" -jar DynamoDBLocal.jar
     - task: PowerShell@2
       displayName: 'Setup Consul'
       inputs:
@@ -37,4 +48,4 @@ jobs:
           gem install bundler -v 1.17.3
           bundle install
           mkdir rspec
-          bundle exec rspec --exclude-pattern "spec/integrations/dynamodb_feature_store_spec.rb," --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec
+          bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec

From 6a20ff1c1946992210fd33e1a1f7e997e29e43f5 Mon Sep 17 00:00:00 2001
From: Eli Bishop
Date: Fri, 22 Feb 2019 11:10:22 -0800
Subject: [PATCH 111/182] add experimentation event overrides for rules and fallthrough

---
 lib/ldclient-rb/evaluation.rb         | 25 +++------
 lib/ldclient-rb/impl/event_factory.rb | 77 +++++++++++++++++++++++++++
 lib/ldclient-rb/ldclient.rb           | 44 ++++++---------
 spec/evaluation_spec.rb               | 74 +++++++++++++------------
 spec/ldclient_spec.rb                 | 57 +++++++++++++++++++-
 5 files changed, 193 insertions(+), 84 deletions(-)
 create mode 100644 lib/ldclient-rb/impl/event_factory.rb

diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb
index f873a6e3..14a7ea55 100644
--- a/lib/ldclient-rb/evaluation.rb
+++ b/lib/ldclient-rb/evaluation.rb
@@ -195,22 +195,22 @@ def error_result(errorKind, value = nil)
 
   # Evaluates a feature flag and returns an EvalResult. The result.value will be nil if the flag returns
   # the default value. Error conditions produce a result with an error reason, not an exception.
-  def evaluate(flag, user, store, logger)
+  def evaluate(flag, user, store, logger, event_factory)
     if user.nil? || user[:key].nil?
       return EvalResult.new(error_result('USER_NOT_SPECIFIED'), [])
     end
 
     events = []
-    detail = eval_internal(flag, user, store, events, logger)
+    detail = eval_internal(flag, user, store, events, logger, event_factory)
     return EvalResult.new(detail, events)
   end
 
-  def eval_internal(flag, user, store, events, logger)
+  def eval_internal(flag, user, store, events, logger, event_factory)
     if !flag[:on]
       return get_off_value(flag, { kind: 'OFF' }, logger)
     end
 
-    prereq_failure_reason = check_prerequisites(flag, user, store, events, logger)
+    prereq_failure_reason = check_prerequisites(flag, user, store, events, logger, event_factory)
     if !prereq_failure_reason.nil?
       return get_off_value(flag, prereq_failure_reason, logger)
     end
@@ -243,7 +243,7 @@ def eval_internal(flag, user, store, events, logger)
     return EvaluationDetail.new(nil, nil, { kind: 'FALLTHROUGH' })
   end
 
-  def check_prerequisites(flag, user, store, events, logger)
+  def check_prerequisites(flag, user, store, events, logger, event_factory)
     (flag[:prerequisites] || []).each do |prerequisite|
       prereq_ok = true
       prereq_key = prerequisite[:key]
@@ -254,25 +254,16 @@ def check_prerequisites(flag, user, store, events, logger)
         prereq_ok = false
       else
         begin
-          prereq_res = eval_internal(prereq_flag, user, store, events, logger)
+          prereq_res = eval_internal(prereq_flag, user, store, events, logger, event_factory)
           # Note that if the prerequisite flag is off, we don't consider it a match no matter what its
           # off variation was. But we still need to evaluate it in order to generate an event.
           if !prereq_flag[:on] || prereq_res.variation_index != prerequisite[:variation]
             prereq_ok = false
          end
-          event = {
-            kind: "feature",
-            key: prereq_key,
-            variation: prereq_res.variation_index,
-            value: prereq_res.value,
-            version: prereq_flag[:version],
-            prereqOf: flag[:key],
-            trackEvents: prereq_flag[:trackEvents],
-            debugEventsUntilDate: prereq_flag[:debugEventsUntilDate]
-          }
+          event = event_factory.new_eval_event(prereq_flag, user, prereq_res, nil, flag)
           events.push(event)
         rescue => exn
-          Util.log_exception(logger, "Error evaluating prerequisite flag \"#{prereq_key}\" for flag \"{flag[:key]}\"", exn)
+          Util.log_exception(logger, "Error evaluating prerequisite flag \"#{prereq_key}\" for flag \"#{flag[:key]}\"", exn)
           prereq_ok = false
         end
       end
diff --git a/lib/ldclient-rb/impl/event_factory.rb b/lib/ldclient-rb/impl/event_factory.rb
new file mode 100644
index 00000000..6af4c5f8
--- /dev/null
+++ b/lib/ldclient-rb/impl/event_factory.rb
@@ -0,0 +1,77 @@
+
+module LaunchDarkly
+  module Impl
+    # Event constructors are centralized here to avoid mistakes and repetitive logic.
+    # The LDClient owns two instances of EventFactory: one that always embeds evaluation reasons
+    # in the events (for when variation_detail is called) and one that doesn't.
+    class EventFactory
+      def initialize(with_reasons)
+        @with_reasons = with_reasons
+      end
+
+      def new_eval_event(flag, user, detail, default_value, prereq_of_flag = nil)
+        add_experiment_data = is_experiment(flag, detail.reason)
+        e = {
+          kind: 'feature',
+          key: flag[:key],
+          user: user,
+          variation: detail.variation_index,
+          value: detail.value,
+          default: default_value,
+          version: flag[:version]
+        }
+        # the following properties are handled separately so we don't waste bandwidth on unused keys
+        e[:trackEvents] = true if add_experiment_data || flag[:trackEvents]
+        e[:debugEventsUntilDate] = flag[:debugEventsUntilDate] if flag[:debugEventsUntilDate]
+        e[:prereqOf] = prereq_of_flag[:key] if !prereq_of_flag.nil?
+        e[:reason] = detail.reason if add_experiment_data || @with_reasons
+        e
+      end
+
+      def new_default_event(flag, user, default_value, reason)
+        add_experiment_data = is_experiment(flag, reason)
+        e = {
+          kind: 'feature',
+          key: flag[:key],
+          user: user,
+          value: default_value,
+          default: default_value,
+          version: flag[:version]
+        }
+        e[:trackEvents] = true if add_experiment_data || flag[:trackEvents]
+        e[:debugEventsUntilDate] = flag[:debugEventsUntilDate] if flag[:debugEventsUntilDate]
+        e[:reason] = reason if add_experiment_data || @with_reasons
+        e
+      end
+
+      def new_unknown_flag_event(key, user, default_value, reason)
+        e = {
+          kind: 'feature',
+          key: key,
+          user: user,
+          value: default_value,
+          default: default_value
+        }
+        e[:reason] = reason if @with_reasons
+        e
+      end
+
+      private
+
+      def is_experiment(flag, reason)
+        return false if !reason
+        case reason[:kind]
+        when 'RULE_MATCH'
+          index = reason[:ruleIndex]
+          if !index.nil?
+            rules = flag[:rules] || []
+            return index >= 0 && index < rules.length && rules[index][:trackEvents]
+          end
+        when 'FALLTHROUGH'
+          return !!flag[:trackEventsFallthrough]
+        end
+        false
+      end
+    end
+  end
+end
diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb
index a5799700..0c113d0d 100644
--- a/lib/ldclient-rb/ldclient.rb
+++ b/lib/ldclient-rb/ldclient.rb
@@ -1,3 +1,4 @@
+require "ldclient-rb/impl/event_factory"
 require "ldclient-rb/impl/store_client_wrapper"
 require "concurrent/atomics"
 require "digest/sha1"
@@ -13,6 +14,7 @@ module LaunchDarkly
   #
   class LDClient
     include Evaluation
+    include Impl
     #
     # Creates a new client instance that connects to LaunchDarkly. A custom
     # configuration parameter can also supplied to specify advanced options,
@@ -32,6 +34,9 @@ class LDClient
     def initialize(sdk_key, config = Config.default, wait_for_sec = 5)
       @sdk_key = sdk_key
 
+      @event_factory_default = EventFactory.new(false)
+      @event_factory_with_reasons = EventFactory.new(true)
+
       # We need to wrap the feature store object with a FeatureStoreClientWrapper in order to add
       # some necessary logic around updates. Unfortunately, we have code elsewhere that accesses
       # the feature store through the Config object, so we need to make a new Config that uses
@@ -165,7 +170,7 @@ def initialized?
     # @return the variation to show the user, or the default value if there's an an error
     #
     def variation(key, user, default)
-      evaluate_internal(key, user, default, false).value
+      evaluate_internal(key, user, default, @event_factory_default).value
     end
 
     #
@@ -192,7 +197,7 @@ def variation(key, user, default)
     # @return [EvaluationDetail] an object describing the result
     #
     def variation_detail(key, user, default)
-      evaluate_internal(key, user, default, true)
+      evaluate_internal(key, user, default, @event_factory_with_reasons)
     end
 
     #
@@ -290,7 +295,7 @@ def all_flags_state(user, options={})
           next
         end
         begin
-          result = evaluate(f, user, @store, @config.logger)
+          result = evaluate(f, user, @store, @config.logger, @event_factory_default)
           state.add_flag(f, result.detail.value, result.detail.variation_index, with_reasons ? result.detail.reason : nil,
             details_only_if_tracked)
         rescue => exn
@@ -330,7 +335,7 @@ def create_default_data_source(sdk_key, config)
     end
 
     # @return [EvaluationDetail]
-    def evaluate_internal(key, user, default, include_reasons_in_events)
+    def evaluate_internal(key, user, default, event_factory)
       if @config.offline?
         return error_result('CLIENT_NOT_READY', default)
       end
@@ -340,8 +345,9 @@ def evaluate_internal(key, user, default, include_reasons_in_events)
           @config.logger.warn { "[LDClient] Client has not finished initializing; using last known values from feature store" }
         else
           @config.logger.error { "[LDClient] Client has not finished initializing; feature store unavailable, returning default value" }
-          @event_processor.add_event(kind: "feature", key: key, value: default, default: default, user: user)
-          return error_result('CLIENT_NOT_READY', default)
+          detail = error_result('CLIENT_NOT_READY', default)
+          @event_processor.add_event(event_factory.new_unknown_flag_event(key, user, default, detail.reason))
+          return detail
         end
       end
 
@@ -351,20 +357,19 @@ def evaluate_internal(key, user, default, include_reasons_in_events)
       if feature.nil?
         @config.logger.info { "[LDClient] Unknown feature flag \"#{key}\". Returning default value" }
         detail = error_result('FLAG_NOT_FOUND', default)
-        @event_processor.add_event(kind: "feature", key: key, value: default, default: default, user: user,
-          reason: include_reasons_in_events ? detail.reason : nil)
+        @event_processor.add_event(event_factory.new_unknown_flag_event(key, user, default, detail.reason))
         return detail
       end
 
       unless user
         @config.logger.error { "[LDClient] Must specify user" }
         detail = error_result('USER_NOT_SPECIFIED', default)
-        @event_processor.add_event(make_feature_event(feature, user, detail, default, include_reasons_in_events))
+        @event_processor.add_event(event_factory.new_default_event(feature, user, default, detail.reason))
         return detail
       end
 
       begin
-        res = evaluate(feature, user, @store, @config.logger)
+        res = evaluate(feature, user, @store, @config.logger, event_factory)
         if !res.events.nil?
           res.events.each do |event|
             @event_processor.add_event(event)
@@ -374,12 +379,12 @@ def evaluate_internal(key, user, default, include_reasons_in_events)
         if detail.default_value?
           detail = EvaluationDetail.new(default, nil, detail.reason)
         end
-        @event_processor.add_event(make_feature_event(feature, user, detail, default, include_reasons_in_events))
+        @event_processor.add_event(event_factory.new_eval_event(feature, user, detail, default))
         return detail
       rescue => exn
         Util.log_exception(@config.logger, "Error evaluating feature flag \"#{key}\"", exn)
         detail = error_result('EXCEPTION', default)
-        @event_processor.add_event(make_feature_event(feature, user, detail, default, include_reasons_in_events))
+        @event_processor.add_event(event_factory.new_default_event(feature, user, default, detail.reason))
         return detail
       end
     end
@@ -389,21 +394,6 @@ def sanitize_user(user)
         user[:key] = user[:key].to_s
       end
     end
-
-    def make_feature_event(flag, user, detail, default, with_reasons)
-      {
-        kind: "feature",
-        key: flag[:key],
-        user: user,
-        variation: detail.variation_index,
-        value: detail.value,
-        default: default,
-        version: flag[:version],
-        trackEvents: flag[:trackEvents],
-        debugEventsUntilDate: flag[:debugEventsUntilDate],
-        reason: with_reasons ? detail.reason : nil
-      }
-    end
   end
 
 #
diff --git a/spec/evaluation_spec.rb b/spec/evaluation_spec.rb
index 3af960c6..c8949b3a 100644
--- a/spec/evaluation_spec.rb
+++ b/spec/evaluation_spec.rb
@@ -7,6 +7,8 @@
 
   let(:features) { LaunchDarkly::InMemoryFeatureStore.new }
 
+  let(:factory) { LaunchDarkly::Impl::EventFactory.new(false) }
+
   let(:user) {
     {
       key: "userkey",
@@ -36,7 +38,7 @@ def boolean_flag_with_clauses(clauses)
       }
       user = { key: 'x' }
       detail = LaunchDarkly::EvaluationDetail.new('b', 1, { kind: 'OFF' })
-      result = evaluate(flag, user, features, logger)
+      result = evaluate(flag, user, features, logger, factory)
       expect(result.detail).to eq(detail)
       expect(result.events).to eq([])
     end
@@ -50,7 +52,7 @@ def boolean_flag_with_clauses(clauses)
       }
       user = { key: 'x' }
       detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'OFF' })
-      result = evaluate(flag, user, features, logger)
+      result = evaluate(flag, user, features, logger, factory)
       expect(result.detail).to eq(detail)
       expect(result.events).to eq([])
     end
@@ -66,7 +68,7 @@ def boolean_flag_with_clauses(clauses)
       user = { key: 'x' }
       detail = LaunchDarkly::EvaluationDetail.new(nil, nil,
         { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' })
-      result = evaluate(flag, user, features, logger)
+      result = evaluate(flag, user, features, logger, factory)
       expect(result.detail).to eq(detail)
       expect(result.events).to eq([])
     end
@@ -82,7 +84,7 @@ def boolean_flag_with_clauses(clauses)
       user = { key: 'x' }
       detail = LaunchDarkly::EvaluationDetail.new(nil, nil,
         { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' })
-      result = evaluate(flag, user, features, logger)
+      result = evaluate(flag, user, features, logger, factory)
       expect(result.detail).to eq(detail)
       expect(result.events).to eq([])
     end
@@ -99,7 +101,7 @@ def boolean_flag_with_clauses(clauses)
       user = { key: 'x' }
       detail = LaunchDarkly::EvaluationDetail.new('b', 1,
         { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'badfeature' })
-      result = evaluate(flag, user, features, logger)
+      result = evaluate(flag, user, features, logger, factory)
       expect(result.detail).to eq(detail)
       expect(result.events).to eq([])
     end
@@ -127,10 +129,9 @@ def boolean_flag_with_clauses(clauses)
       detail = LaunchDarkly::EvaluationDetail.new('b', 1,
         { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' })
       events_should_be = [{
-        kind: 'feature', key: 'feature1', variation: nil, value: nil, version: 2, prereqOf: 'feature0',
-        trackEvents: nil, debugEventsUntilDate: nil
+        kind: 'feature', key: 'feature1', user: user, value: nil, default: nil, variation: nil, version: 2, prereqOf: 'feature0'
       }]
-      result = evaluate(flag, user, features, logger)
+      result = evaluate(flag, user, features, logger, factory)
       expect(result.detail).to eq(detail)
       expect(result.events).to eq(events_should_be)
     end
@@ -159,10 +160,9 @@ def boolean_flag_with_clauses(clauses)
       detail = LaunchDarkly::EvaluationDetail.new('b', 1,
         { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' })
       events_should_be = [{
-        kind: 'feature', key: 'feature1', variation: 1, value: 'e', version: 2, prereqOf: 'feature0',
-        trackEvents: nil, debugEventsUntilDate: nil
+        kind: 'feature', key: 'feature1', user: user, variation: 1, value: 'e', default: nil, version: 2, prereqOf: 'feature0'
       }]
-      result = evaluate(flag, user, features, logger)
+      result = evaluate(flag, user, features, logger, factory)
       expect(result.detail).to eq(detail)
       expect(result.events).to eq(events_should_be)
     end
@@ -189,10 +189,9 @@ def boolean_flag_with_clauses(clauses)
       detail = LaunchDarkly::EvaluationDetail.new('b', 1,
         { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' })
       events_should_be = [{
-        kind: 'feature', key: 'feature1', variation: 0, value: 'd', version: 2, prereqOf: 'feature0',
-        trackEvents: nil, debugEventsUntilDate: nil
+        kind: 'feature', key: 'feature1', user: user, variation: 0, value: 'd', default: nil, version: 2, prereqOf: 'feature0'
       }]
-      result = evaluate(flag, user, features, logger)
+      result = evaluate(flag, user, features, logger, factory)
       expect(result.detail).to eq(detail)
       expect(result.events).to eq(events_should_be)
     end
@@ -218,10 +217,9 @@ def boolean_flag_with_clauses(clauses)
       user = { key: 'x' }
       detail = LaunchDarkly::EvaluationDetail.new('a', 0, { kind: 'FALLTHROUGH' })
       events_should_be = [{
-        kind: 'feature', key: 'feature1', variation: 1, value: 'e', version: 2, prereqOf: 'feature0',
-        trackEvents: nil, debugEventsUntilDate: nil
+        kind: 'feature', key: 'feature1', user: user, variation: 1, value: 'e', default: nil, version: 2, prereqOf: 'feature0'
       }]
-      result = evaluate(flag, user, features, logger)
+      result = evaluate(flag, user, features, logger, factory)
       expect(result.detail).to eq(detail)
       expect(result.events).to eq(events_should_be)
     end
@@ -236,7 +234,7 @@ def boolean_flag_with_clauses(clauses)
       }
       user = { key: 'userkey' }
       detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' })
-      result = evaluate(flag, user, features, logger)
+      result = evaluate(flag, user, features, logger, factory)
       expect(result.detail).to eq(detail)
       expect(result.events).to eq([])
     end
@@ -251,7 +249,7 @@ def boolean_flag_with_clauses(clauses)
       }
       user = { key: 'userkey' }
       detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' })
-      result = evaluate(flag, user, features, logger)
+      result = evaluate(flag, user, features, logger, factory)
       expect(result.detail).to eq(detail)
       expect(result.events).to eq([])
     end
@@ -266,7 +264,7 @@ def boolean_flag_with_clauses(clauses)
       }
       user = { key: 'userkey' }
       detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' })
-      result = evaluate(flag, user, features, logger)
+      result = evaluate(flag, user, features, logger, factory)
       expect(result.detail).to eq(detail)
       expect(result.events).to eq([])
     end
@@ -281,7 +279,7 @@ def boolean_flag_with_clauses(clauses)
       }
       user = { key: 'userkey' }
       detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' })
-      result = evaluate(flag, user, features, logger)
+      result = evaluate(flag, user, features, logger, factory)
       expect(result.detail).to eq(detail)
       expect(result.events).to eq([])
     end
@@ -299,7 +297,7 @@ def boolean_flag_with_clauses(clauses)
       }
       user = { key: 'userkey' }
       detail = LaunchDarkly::EvaluationDetail.new('c', 2, { kind: 'TARGET_MATCH' })
-      result = evaluate(flag, user, features, logger)
+      result = evaluate(flag, user, features, logger, factory)
       expect(result.detail).to eq(detail)
       expect(result.events).to eq([])
     end
@@ -310,7 +308,7 @@ def boolean_flag_with_clauses(clauses)
       user = { key: 'userkey' }
       detail = LaunchDarkly::EvaluationDetail.new(true, 1,
         { kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'ruleid' })
-      result = evaluate(flag, user, features, logger)
+      result = evaluate(flag, user, features, logger, factory)
       expect(result.detail).to eq(detail)
       expect(result.events).to eq([])
     end
@@ -321,7 +319,7 @@ def boolean_flag_with_clauses(clauses)
       user = { key: 'userkey' }
       detail = LaunchDarkly::EvaluationDetail.new(nil, nil,
         { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' })
-      result = evaluate(flag, user, features, logger)
+      result = evaluate(flag, user, features, logger, factory)
       expect(result.detail).to eq(detail)
       expect(result.events).to eq([])
     end
@@ -332,7 +330,7 @@ def boolean_flag_with_clauses(clauses)
       user = { key: 'userkey' }
       detail = LaunchDarkly::EvaluationDetail.new(nil, nil,
         { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' })
-      result = evaluate(flag, user, features, logger)
+      result = evaluate(flag, user, features, logger, factory)
       expect(result.detail).to eq(detail)
       expect(result.events).to eq([])
     end
@@ -343,7 +341,7 @@ def boolean_flag_with_clauses(clauses)
       user = { key: 'userkey' }
       detail = LaunchDarkly::EvaluationDetail.new(nil, nil,
         { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' })
-      result = evaluate(flag, user, features, logger)
+      result = evaluate(flag, user, features, logger, factory)
       expect(result.detail).to eq(detail)
       expect(result.events).to eq([])
     end
@@ -355,7 +353,7 @@ def boolean_flag_with_clauses(clauses)
       user = { key: 'userkey' }
       detail = LaunchDarkly::EvaluationDetail.new(nil, nil,
         { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' })
-      result = evaluate(flag, user, features, logger)
+      result = evaluate(flag, user, features, logger, factory)
       expect(result.detail).to eq(detail)
       expect(result.events).to eq([])
     end
@@ -366,28 +364,28 @@ def boolean_flag_with_clauses(clauses)
       user = { key: 'x', name: 'Bob' }
       clause = { attribute: 'name', op: 'in', values: ['Bob'] }
       flag = boolean_flag_with_clauses([clause])
-      expect(evaluate(flag, user, features, logger).detail.value).to be true
+      expect(evaluate(flag, user, features, logger, factory).detail.value).to be true
     end
 
     it "can match custom attribute" do
       user = { key: 'x', name: 'Bob', custom: { legs: 4 } }
       clause = { attribute: 'legs', op: 'in', values: [4] }
       flag = boolean_flag_with_clauses([clause])
-      expect(evaluate(flag, user, features, logger).detail.value).to be true
+      expect(evaluate(flag, user, features, logger, factory).detail.value).to be true
     end
 
     it "returns false for missing attribute" do
       user = { key: 'x', name: 'Bob' }
       clause = { attribute: 'legs', op: 'in', values: [4] }
       flag = boolean_flag_with_clauses([clause])
-      expect(evaluate(flag, user, features, logger).detail.value).to be false
+      expect(evaluate(flag, user, features, logger, factory).detail.value).to be false
     end
 
     it "returns false for unknown operator" do
       user = { key: 'x', name: 'Bob' }
       clause = { attribute: 'name', op: 'unknown', values: [4] }
       flag = boolean_flag_with_clauses([clause])
-      expect(evaluate(flag, user, features, logger).detail.value).to be false
+      expect(evaluate(flag, user, features, logger, factory).detail.value).to be false
     end
 
     it "does not stop evaluating rules after clause with unknown operator" do
@@ -397,14 +395,14 @@ def boolean_flag_with_clauses(clauses)
       clause1 = { attribute: 'name', op: 'in', values: ['Bob'] }
       rule1 = { clauses: [ clause1 ], variation: 1 }
       flag = boolean_flag_with_rules([rule0, rule1])
-      expect(evaluate(flag, user, features, logger).detail.value).to be true
+      expect(evaluate(flag, user, features, logger, factory).detail.value).to be true
     end
 
     it "can be negated" do
       user = { key: 'x', name: 'Bob' }
       clause = { attribute: 'name', op: 'in', values: ['Bob'], negate: true }
       flag = boolean_flag_with_clauses([clause])
-      expect(evaluate(flag, user, features, logger).detail.value).to be false
+      expect(evaluate(flag, user, features, logger, factory).detail.value).to be false
     end
 
     it "retrieves segment from segment store for segmentMatch operator" do
@@ -419,14 +417,14 @@ def boolean_flag_with_clauses(clauses)
       user = { key: 'userkey' }
       clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] }
       flag = boolean_flag_with_clauses([clause])
-      expect(evaluate(flag, user, features, logger).detail.value).to be true
+      expect(evaluate(flag, user, features, logger, factory).detail.value).to be true
     end
 
     it "falls through with no errors if referenced segment is not found" do
       user = { key: 'userkey' }
       clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] }
       flag = boolean_flag_with_clauses([clause])
-      expect(evaluate(flag, user, features, logger).detail.value).to be false
+      expect(evaluate(flag, user, features, logger, factory).detail.value).to be false
     end
 
     it "can be negated" do
@@ -435,7 +433,7 @@ def boolean_flag_with_clauses(clauses)
       flag = boolean_flag_with_clauses([clause])
       expect {
         clause[:negate] = true
-      }.to change {evaluate(flag, user, features, logger).detail.value}.from(true).to(false)
+      }.to change {evaluate(flag, user, features, logger, factory).detail.value}.from(true).to(false)
     end
   end
 
@@ -538,7 +536,7 @@ def boolean_flag_with_clauses(clauses)
         user = { key: 'x', custom: { foo: value1 } }
         clause = { attribute: 'foo', op: op, values: [value2] }
         flag = boolean_flag_with_clauses([clause])
-        expect(evaluate(flag, user, features, logger).detail.value).to be shouldBe
+        expect(evaluate(flag, user, features, logger, factory).detail.value).to be shouldBe
       end
     end
   end
@@ -629,7 +627,7 @@ def test_segment_match(segment)
     features.upsert(LaunchDarkly::SEGMENTS, segment)
     clause = make_segment_match_clause(segment)
     flag = boolean_flag_with_clauses([clause])
-    evaluate(flag, user, features, logger).detail.value
+    evaluate(flag, user, features, logger, factory).detail.value
   end
 
   it 'explicitly includes user' do
diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb
index fca81ab0..2916861e 100644
--- a/spec/ldclient_spec.rb
+++ b/spec/ldclient_spec.rb
@@ -91,7 +91,6 @@ def event_processor
         key: "key",
         version: 100,
         user: nil,
-        variation: nil,
         value: "default",
         default: "default",
         trackEvents: true,
@@ -109,7 +108,6 @@ def event_processor
         key: "key",
         version: 100,
         user: bad_user,
-        variation: nil,
         value: "default",
         default: "default",
         trackEvents: true,
@@ -117,6 +115,61 @@ def event_processor
       ))
       client.variation("key", bad_user, "default")
     end
+
+    it "sets trackEvents and reason if trackEvents is set for matched rule" do
+      flag = {
+        key: 'flag',
+        on: true,
+        variations: [ 'value' ],
+        version: 100,
+        rules: [
+          clauses: [
+            { attribute: 'key', op: 'in', values: [ user[:key] ] }
+          ],
+          variation: 0,
+          id: 'id',
+          trackEvents: true
+        ]
+      }
+      config.feature_store.init({ LaunchDarkly::FEATURES => {} })
+      config.feature_store.upsert(LaunchDarkly::FEATURES, flag)
+      expect(event_processor).to receive(:add_event).with(hash_including(
+        kind: 'feature',
+        key: 'flag',
+        version: 100,
+        user: user,
+        value: 'value',
+        default: 'default',
+        trackEvents: true,
+        reason: { kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'id' }
+      ))
+      client.variation('flag', user, 'default')
+    end
+
+    it "sets trackEvents and reason if trackEventsFallthrough is set and we fell through" do
+      flag = {
+        key: 'flag',
+        on: true,
+        variations: [ 'value' ],
+        fallthrough: { variation: 0 },
+        version: 100,
+        rules: [],
+        trackEventsFallthrough: true
+      }
+      config.feature_store.init({ LaunchDarkly::FEATURES => {} })
+      config.feature_store.upsert(LaunchDarkly::FEATURES, flag)
+      expect(event_processor).to receive(:add_event).with(hash_including(
+        kind: 'feature',
+        key: 'flag',
+        version: 100,
+        user: user,
+        value: 'value',
+        default: 'default',
+        trackEvents: true,
+        reason: { kind: 'FALLTHROUGH' }
+      ))
+      client.variation('flag', user, 'default')
+    end
   end
 
   describe '#variation_detail' do

From 12f541a4a3be42d7ea70d3cb2e6f2571958636f6 Mon Sep 17 00:00:00 2001
From: Eli Bishop
Date: Fri, 22 Feb 2019 12:04:13 -0800
Subject: [PATCH 112/182] warn & don't send event if identify or track has no valid user

---
 lib/ldclient-rb/ldclient.rb                   |  8 +++
 spec/fixtures/numeric_key_user.json           |  9 ----
 spec/fixtures/sanitized_numeric_key_user.json |  9 ----
 spec/ldclient_spec.rb                         | 54 ++++++++++++++++---
 4 files changed, 55 insertions(+), 25 deletions(-)
 delete mode 100644 spec/fixtures/numeric_key_user.json
 delete mode 100644 spec/fixtures/sanitized_numeric_key_user.json

diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb
index a5799700..28c21869 100644
--- a/lib/ldclient-rb/ldclient.rb
+++ b/lib/ldclient-rb/ldclient.rb
@@ -211,6 +211,10 @@ def variation_detail(key, user, default)
   # @return [void]
   #
   def identify(user)
+    if !user || user[:key].nil?
+      @config.logger.warn("Identify called with nil user or nil user key!")
+      return
+    end
     sanitize_user(user)
     @event_processor.add_event(kind: "identify", key: user[:key], user: user)
   end
@@ -229,6 +233,10 @@ def identify(user)
   # @return [void]
   #
   def track(event_name, user, data)
+    if !user || user[:key].nil?
+ @config.logger.warn("Track called with nil user or nil user key!") + return + end sanitize_user(user) @event_processor.add_event(kind: "custom", key: event_name, user: user, data: data) end diff --git a/spec/fixtures/numeric_key_user.json b/spec/fixtures/numeric_key_user.json deleted file mode 100644 index 2a7ec475..00000000 --- a/spec/fixtures/numeric_key_user.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "key": 33, - "custom":{ - "groups":[ - "microsoft", - "google" - ] - } -} diff --git a/spec/fixtures/sanitized_numeric_key_user.json b/spec/fixtures/sanitized_numeric_key_user.json deleted file mode 100644 index 874e0067..00000000 --- a/spec/fixtures/sanitized_numeric_key_user.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "key": "33", - "custom":{ - "groups":[ - "microsoft", - "google" - ] - } -} diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index fca81ab0..6f530610 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -8,7 +8,8 @@ subject.new("secret", offline_config) end let(:null_data) { LaunchDarkly::NullUpdateProcessor.new } - let(:config) { LaunchDarkly::Config.new({send_events: false, data_source: null_data}) } + let(:logger) { double().as_null_object } + let(:config) { LaunchDarkly::Config.new({ send_events: false, data_source: null_data, logger: logger }) } let(:client) do subject.new("secret", config) end @@ -17,16 +18,31 @@ JSON.parse(data, symbolize_names: true) end let(:user) do - data = File.read(File.join("spec", "fixtures", "user.json")) - JSON.parse(data, symbolize_names: true) + { + key: "user@test.com", + custom: { + groups: [ "microsoft", "google" ] + } + } end let(:numeric_key_user) do - data = File.read(File.join("spec", "fixtures", "numeric_key_user.json")) - JSON.parse(data, symbolize_names: true) + { + key: 33, + custom: { + groups: [ "microsoft", "google" ] + } + } end let(:sanitized_numeric_key_user) do - data = File.read(File.join("spec", "fixtures", "sanitized_numeric_key_user.json")) - JSON.parse(data, symbolize_names: true) + { + key: "33", + custom: { + groups: [ "microsoft", "google" ] + } + } + end + let(:user_without_key) do + { name: "Keyless Joe" } end def event_processor @@ -342,6 +358,18 @@ def event_processor expect(event_processor).to receive(:add_event).with(hash_including(user: sanitized_numeric_key_user)) client.track("custom_event_name", numeric_key_user, nil) end + + it "does not send an event, and logs a warning, if user is nil" do + expect(event_processor).not_to receive(:add_event) + expect(logger).to receive(:warn) + client.track("custom_event_name", nil, nil) + end + + it "does not send an event, and logs a warning, if user key is nil" do + expect(event_processor).not_to receive(:add_event) + expect(logger).to receive(:warn) + client.track("custom_event_name", user_without_key, nil) + end end describe '#identify' do @@ -354,6 +382,18 @@ def event_processor expect(event_processor).to receive(:add_event).with(hash_including(user: sanitized_numeric_key_user)) client.identify(numeric_key_user) end + + it "does not send an event, and logs a warning, if user is nil" do + expect(event_processor).not_to receive(:add_event) + expect(logger).to receive(:warn) + client.identify(nil) + end + + it "does not send an event, and logs a warning, if user key is nil" do + expect(event_processor).not_to receive(:add_event) + expect(logger).to receive(:warn) + client.identify(user_without_key) + end end describe 'with send_events: false' do From 2800db88876e85dc9bd918b01978f69f135d2207 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 22 
Feb 2019 12:18:05 -0800 Subject: [PATCH 113/182] include user in prereq flag events --- lib/ldclient-rb/evaluation.rb | 3 ++- spec/evaluation_spec.rb | 8 ++++---- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index f873a6e3..1b5bbdca 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -263,6 +263,7 @@ def check_prerequisites(flag, user, store, events, logger) event = { kind: "feature", key: prereq_key, + user: user, variation: prereq_res.variation_index, value: prereq_res.value, version: prereq_flag[:version], @@ -272,7 +273,7 @@ def check_prerequisites(flag, user, store, events, logger) } events.push(event) rescue => exn - Util.log_exception(logger, "Error evaluating prerequisite flag \"#{prereq_key}\" for flag \"{flag[:key]}\"", exn) + Util.log_exception(logger, "Error evaluating prerequisite flag \"#{prereq_key}\" for flag \"#{flag[:key]}\"", exn) prereq_ok = false end end diff --git a/spec/evaluation_spec.rb b/spec/evaluation_spec.rb index 3af960c6..68824ebd 100644 --- a/spec/evaluation_spec.rb +++ b/spec/evaluation_spec.rb @@ -127,7 +127,7 @@ def boolean_flag_with_clauses(clauses) detail = LaunchDarkly::EvaluationDetail.new('b', 1, { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) events_should_be = [{ - kind: 'feature', key: 'feature1', variation: nil, value: nil, version: 2, prereqOf: 'feature0', + kind: 'feature', key: 'feature1', user: user, variation: nil, value: nil, version: 2, prereqOf: 'feature0', trackEvents: nil, debugEventsUntilDate: nil }] result = evaluate(flag, user, features, logger) @@ -159,7 +159,7 @@ def boolean_flag_with_clauses(clauses) detail = LaunchDarkly::EvaluationDetail.new('b', 1, { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) events_should_be = [{ - kind: 'feature', key: 'feature1', variation: 1, value: 'e', version: 2, prereqOf: 'feature0', + kind: 'feature', key: 'feature1', user: user, variation: 1, value: 'e', version: 2, prereqOf: 'feature0', trackEvents: nil, debugEventsUntilDate: nil }] result = evaluate(flag, user, features, logger) @@ -189,7 +189,7 @@ def boolean_flag_with_clauses(clauses) detail = LaunchDarkly::EvaluationDetail.new('b', 1, { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) events_should_be = [{ - kind: 'feature', key: 'feature1', variation: 0, value: 'd', version: 2, prereqOf: 'feature0', + kind: 'feature', key: 'feature1', user: user, variation: 0, value: 'd', version: 2, prereqOf: 'feature0', trackEvents: nil, debugEventsUntilDate: nil }] result = evaluate(flag, user, features, logger) @@ -218,7 +218,7 @@ def boolean_flag_with_clauses(clauses) user = { key: 'x' } detail = LaunchDarkly::EvaluationDetail.new('a', 0, { kind: 'FALLTHROUGH' }) events_should_be = [{ - kind: 'feature', key: 'feature1', variation: 1, value: 'e', version: 2, prereqOf: 'feature0', + kind: 'feature', key: 'feature1', user: user, variation: 1, value: 'e', version: 2, prereqOf: 'feature0', trackEvents: nil, debugEventsUntilDate: nil }] result = evaluate(flag, user, features, logger) From 47106d9da24380ec3b7ee630a674a15dfef21dac Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 25 Feb 2019 16:12:29 -0800 Subject: [PATCH 114/182] rm unnecessary logic --- lib/ldclient-rb/impl/event_factory.rb | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/lib/ldclient-rb/impl/event_factory.rb b/lib/ldclient-rb/impl/event_factory.rb index 6af4c5f8..83dc76d9 100644 --- a/lib/ldclient-rb/impl/event_factory.rb 
+++ b/lib/ldclient-rb/impl/event_factory.rb
@@ -29,7 +29,6 @@ def new_eval_event(flag, user, detail, default_value, prereq_of_flag = nil)
       end
 
       def new_default_event(flag, user, default_value, reason)
-        add_experiment_data = is_experiment(flag, reason)
         e = {
           kind: 'feature',
           key: flag[:key],
@@ -38,9 +37,9 @@ def new_default_event(flag, user, default_value, reason)
           default: default_value,
           version: flag[:version]
         }
-        e[:trackEvents] = true if add_experiment_data || flag[:trackEvents]
+        e[:trackEvents] = true if flag[:trackEvents]
         e[:debugEventsUntilDate] = flag[:debugEventsUntilDate] if flag[:debugEventsUntilDate]
-        e[:reason] = reason if add_experiment_data || @with_reasons
+        e[:reason] = reason if @with_reasons
         e
       end

From 763a222eece4e9eec4d8b7e441af62f8c2f4f607 Mon Sep 17 00:00:00 2001
From: Eli Bishop
Date: Tue, 26 Feb 2019 15:25:43 -0800
Subject: [PATCH 115/182] more factory methods

---
 lib/ldclient-rb/impl/event_factory.rb | 20 ++++++++++++++++++++
 lib/ldclient-rb/ldclient.rb           |  4 ++--
 2 files changed, 22 insertions(+), 2 deletions(-)

diff --git a/lib/ldclient-rb/impl/event_factory.rb b/lib/ldclient-rb/impl/event_factory.rb
index 83dc76d9..a43f6a33 100644
--- a/lib/ldclient-rb/impl/event_factory.rb
+++ b/lib/ldclient-rb/impl/event_factory.rb
@@ -4,6 +4,9 @@ module Impl
     # Event constructors are centralized here to avoid mistakes and repetitive logic.
     # The LDClient owns two instances of EventFactory: one that always embeds evaluation reasons
     # in the events (for when variation_detail is called) and one that doesn't.
+    #
+    # Note that these methods do not set the "creationDate" property, because in the Ruby client,
+    # that is done by EventProcessor.add_event().
     class EventFactory
       def initialize(with_reasons)
         @with_reasons = with_reasons
@@ -55,6 +58,23 @@ def new_unknown_flag_event(key, user, default_value, reason)
         e
       end
 
+      def new_identify_event(user)
+        {
+          kind: 'identify',
+          key: user[:key],
+          user: user
+        }
+      end
+
+      def new_custom_event(event_name, user, data)
+        {
+          kind: 'custom',
+          key: event_name,
+          user: user,
+          data: data
+        }
+      end
+
       private
 
       def is_experiment(flag, reason)
diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb
index 0c113d0d..bf396827 100644
--- a/lib/ldclient-rb/ldclient.rb
+++ b/lib/ldclient-rb/ldclient.rb
@@ -217,7 +217,7 @@ def variation_detail(key, user, default)
   #
   def identify(user)
     sanitize_user(user)
-    @event_processor.add_event(kind: "identify", key: user[:key], user: user)
+    @event_processor.add_event(@event_factory_default.new_identify_event(user))
   end
 
   #
@@ -235,7 +235,7 @@ def identify(user)
   #
   def track(event_name, user, data)
     sanitize_user(user)
-    @event_processor.add_event(kind: "custom", key: event_name, user: user, data: data)
+    @event_processor.add_event(@event_factory_default.new_custom_event(event_name, user, data))
   end
 
   #

From 4c234619ce652efcb8658ba5f9c85c728db138b7 Mon Sep 17 00:00:00 2001
From: Ben Woskow
Date: Wed, 6 Mar 2019 12:48:21 -0800
Subject: [PATCH 116/182] update readme to refer to docs

---
 README.md | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/README.md b/README.md
index df406928..1c3eaa8a 100644
--- a/README.md
+++ b/README.md
@@ -33,6 +33,8 @@ require 'ldclient-rb'
 client = LaunchDarkly::LDClient.new("your_sdk_key")
 ```
 
+*NOTE: Please refer to [our documentation](https://docs.launchdarkly.com/docs/ruby-sdk-reference#section-initializing-ldclient-using-spring-unicorn-or-puma) for additional instructions on how to use LaunchDarkly with [Spring](https://github.com/rails/spring), [Unicorn](https://bogomips.org/unicorn/), or [Puma](https://github.com/puma/puma).*
+
 ### Ruby on Rails
 
 1. Add `gem 'ldclient-rb'` to your Gemfile and `bundle install`

From 232f419e2bd69d5a6e46ca2e32b58aed42a4ceb2 Mon Sep 17 00:00:00 2001
From: Eli Bishop
Date: Wed, 20 Mar 2019 12:03:59 -0700
Subject: [PATCH 117/182] add Ruby 2.6.2 to CI

---
 .circleci/config.yml | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 8201b95d..c6ff6938 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -9,6 +9,7 @@ workflows:
       - test-2.3
       - test-2.4
       - test-2.5
+      - test-2.6
       - test-jruby-9.2
 
 ruby-docker-template: &ruby-docker-template
@@ -57,6 +58,13 @@ jobs:
       - image: consul
       - image: redis
       - image: amazon/dynamodb-local
+  test-2.6:
+    <<: *ruby-docker-template
+    docker:
+      - image: circleci/ruby:2.6.2-stretch
+      - image: consul
+      - image: redis
+      - image: amazon/dynamodb-local
   test-jruby-9.2:
     <<: *ruby-docker-template
     docker:

From 20d109b8a1561ed5a57b4c3fa1836cbbb30852d7 Mon Sep 17 00:00:00 2001
From: Eli Bishop
Date: Wed, 20 Mar 2019 16:08:50 -0700
Subject: [PATCH 118/182] fix missing require for net/http

---
 lib/ldclient-rb/util.rb | 1 +
 1 file changed, 1 insertion(+)

diff --git a/lib/ldclient-rb/util.rb b/lib/ldclient-rb/util.rb
index 03849957..396a5171 100644
--- a/lib/ldclient-rb/util.rb
+++ b/lib/ldclient-rb/util.rb
@@ -1,3 +1,4 @@
+require "net/http"
 require "uri"
 
 module LaunchDarkly

From 651dc37b8d13d75b8cba51d5069fe6af944d776d Mon Sep 17 00:00:00 2001
From: Eli Bishop
Date: Thu, 28 Mar 2019 16:12:52 -0700
Subject: [PATCH 119/182] stringify built-in user attributes in events, and secondary key for evals

---
 lib/ldclient-rb/evaluation.rb |  8 ++++-
 lib/ldclient-rb/events.rb     | 20 +++++++----
 lib/ldclient-rb/ldclient.rb   | 15 ++------
 lib/ldclient-rb/util.rb       | 15 ++++++++
 spec/evaluation_spec.rb       | 19 ++++++++++
 spec/events_spec.rb           | 65 +++++++++++++++++++++++++++++++++++
 spec/ldclient_spec.rb         | 26 --------------
 7 files changed, 122 insertions(+), 46 deletions(-)

diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb
index 1b5bbdca..112aa975 100644
--- a/lib/ldclient-rb/evaluation.rb
+++ b/lib/ldclient-rb/evaluation.rb
@@ -189,6 +189,10 @@ def self.comparator(converter)
   # Used internally to hold an evaluation result and the events that were generated from prerequisites.
   EvalResult = Struct.new(:detail, :events)
 
+  ATTRS_TO_SANITIZE_FOR_EVALUATION = [ :key, :secondary ]
+  # Currently we are not stringifying the rest of the built-in attributes prior to evaluation, only for events.
+  # This is because it could affect evaluation results for existing users (ch35206).
+ def error_result(errorKind, value = nil) EvaluationDetail.new(value, nil, { kind: 'ERROR', errorKind: errorKind }) end @@ -200,8 +204,10 @@ def evaluate(flag, user, store, logger) return EvalResult.new(error_result('USER_NOT_SPECIFIED'), []) end + sanitized_user = Util.stringify_attrs(user, ATTRS_TO_SANITIZE_FOR_EVALUATION) + events = [] - detail = eval_internal(flag, user, store, events, logger) + detail = eval_internal(flag, sanitized_user, store, events, logger) return EvalResult.new(detail, events) end diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index c45a9da2..69563572 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -7,9 +7,12 @@ module LaunchDarkly MAX_FLUSH_WORKERS = 5 CURRENT_SCHEMA_VERSION = 3 + USER_ATTRS_TO_STRINGIFY_FOR_EVENTS = [ :key, :secondary, :ip, :country, :email, :firstName, :lastName, + :avatar, :name ] private_constant :MAX_FLUSH_WORKERS private_constant :CURRENT_SCHEMA_VERSION + private_constant :USER_ATTRS_TO_STRINGIFY_FOR_EVENTS # @private class NullEventProcessor @@ -219,7 +222,7 @@ def notice_user(user) if user.nil? || !user.has_key?(:key) true else - @user_keys.add(user[:key]) + @user_keys.add(user[:key].to_s) end end @@ -371,6 +374,11 @@ def make_output_events(events, summary) private + def process_user(event) + filtered = @user_filter.transform_user_props(event[:user]) + Util.stringify_attrs(filtered, USER_ATTRS_TO_STRINGIFY_FOR_EVENTS) + end + def make_output_event(event) case event[:kind] when "feature" @@ -386,7 +394,7 @@ def make_output_event(event) out[:version] = event[:version] if event.has_key?(:version) out[:prereqOf] = event[:prereqOf] if event.has_key?(:prereqOf) if @inline_users || is_debug - out[:user] = @user_filter.transform_user_props(event[:user]) + out[:user] = process_user(event) else out[:userKey] = event[:user].nil? ? nil : event[:user][:key] end @@ -396,8 +404,8 @@ def make_output_event(event) { kind: "identify", creationDate: event[:creationDate], - key: event[:user].nil? ? nil : event[:user][:key], - user: @user_filter.transform_user_props(event[:user]) + key: event[:user].nil? ? nil : event[:user][:key].to_s, + user: process_user(event) } when "custom" out = { @@ -407,7 +415,7 @@ def make_output_event(event) } out[:data] = event[:data] if event.has_key?(:data) if @inline_users - out[:user] = @user_filter.transform_user_props(event[:user]) + out[:user] = process_user(event) else out[:userKey] = event[:user].nil? ? 
nil : event[:user][:key] end @@ -416,7 +424,7 @@ def make_output_event(event) { kind: "index", creationDate: event[:creationDate], - user: @user_filter.transform_user_props(event[:user]) + user: process_user(event) } else event diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 28c21869..3680619a 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -215,7 +215,6 @@ def identify(user) @config.logger.warn("Identify called with nil user or nil user key!") return end - sanitize_user(user) @event_processor.add_event(kind: "identify", key: user[:key], user: user) end @@ -237,7 +236,6 @@ def track(event_name, user, data) @config.logger.warn("Track called with nil user or nil user key!") return end - sanitize_user(user) @event_processor.add_event(kind: "custom", key: event_name, user: user, data: data) end @@ -280,8 +278,6 @@ def all_flags_state(user, options={}) return FeatureFlagsState.new(false) end - sanitize_user(user) - begin features = @store.all(FEATURES) rescue => exn @@ -353,7 +349,6 @@ def evaluate_internal(key, user, default, include_reasons_in_events) end end - sanitize_user(user) if !user.nil? feature = @store.get(FEATURES, key) if feature.nil? @@ -367,12 +362,12 @@ def evaluate_internal(key, user, default, include_reasons_in_events) unless user @config.logger.error { "[LDClient] Must specify user" } detail = error_result('USER_NOT_SPECIFIED', default) - @event_processor.add_event(make_feature_event(feature, user, detail, default, include_reasons_in_events)) + @event_processor.add_event(make_feature_event(feature, nil, detail, default, include_reasons_in_events)) return detail end begin - res = evaluate(feature, user, @store, @config.logger) + res = evaluate(feature, user, @store, @config.logger) # note, evaluate will do its own sanitization if !res.events.nil? res.events.each do |event| @event_processor.add_event(event) @@ -392,12 +387,6 @@ def evaluate_internal(key, user, default, include_reasons_in_events) end end - def sanitize_user(user) - if user[:key] - user[:key] = user[:key].to_s - end - end - def make_feature_event(flag, user, detail, default, with_reasons) { kind: "feature", diff --git a/lib/ldclient-rb/util.rb b/lib/ldclient-rb/util.rb index 396a5171..e129c279 100644 --- a/lib/ldclient-rb/util.rb +++ b/lib/ldclient-rb/util.rb @@ -4,6 +4,21 @@ module LaunchDarkly # @private module Util + def self.stringify_attrs(hash, attrs) + return hash if hash.nil? + ret = hash + changed = false + attrs.each do |attr| + value = hash[attr] + if !value.nil? 
&& !value.is_a?(String) + ret = hash.clone if !changed + ret[attr] = value.to_s + changed = true + end + end + ret + end + def self.new_http_client(uri_s, config) uri = URI(uri_s) client = Net::HTTP.new(uri.hostname, uri.port) diff --git a/spec/evaluation_spec.rb b/spec/evaluation_spec.rb index 68824ebd..52a617b6 100644 --- a/spec/evaluation_spec.rb +++ b/spec/evaluation_spec.rb @@ -359,6 +359,25 @@ def boolean_flag_with_clauses(clauses) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end + + it "coerces user key to a string for evaluation" do + clause = { attribute: 'key', op: 'in', values: ['999'] } + flag = boolean_flag_with_clauses([clause]) + user = { key: 999 } + result = evaluate(flag, user, features, logger) + expect(result.detail.value).to eq(true) + end + + it "coerces secondary key to a string for evaluation" do + # We can't really verify that the rollout calculation works correctly, but we can at least + # make sure it doesn't error out if there's a non-string secondary value (ch35189) + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], + rollout: { salt: '', variations: [ { weight: 100000, variation: 1 } ] } } + flag = boolean_flag_with_rules([rule]) + user = { key: "userkey", secondary: 999 } + result = evaluate(flag, user, features, logger) + expect(result.detail.reason).to eq({ kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'ruleid'}) + end end describe "clause" do diff --git a/spec/events_spec.rb b/spec/events_spec.rb index 90b91ec9..557c3594 100644 --- a/spec/events_spec.rb +++ b/spec/events_spec.rb @@ -9,6 +9,10 @@ let(:hc) { FakeHttpClient.new } let(:user) { { key: "userkey", name: "Red" } } let(:filtered_user) { { key: "userkey", privateAttrs: [ "name" ] } } + let(:numeric_user) { { key: 1, secondary: 2, ip: 3, country: 4, email: 5, firstName: 6, lastName: 7, + avatar: 8, name: 9, anonymous: false, custom: { age: 99 } } } + let(:stringified_numeric_user) { { key: '1', secondary: '2', ip: '3', country: '4', email: '5', firstName: '6', + lastName: '7', avatar: '8', name: '9', anonymous: false, custom: { age: 99 } } } after(:each) do if !@ep.nil? 
@@ -40,6 +44,21 @@ }) end + it "stringifies built-in user attributes in identify event" do + @ep = subject.new("sdk_key", default_config, hc) + flag = { key: "flagkey", version: 11 } + e = { kind: "identify", key: numeric_user[:key], user: numeric_user } + @ep.add_event(e) + + output = flush_and_get_events + expect(output).to contain_exactly( + kind: "identify", + key: numeric_user[:key].to_s, + creationDate: e[:creationDate], + user: stringified_numeric_user + ) + end + it "queues individual feature event with index event" do @ep = subject.new("sdk_key", default_config, hc) flag = { key: "flagkey", version: 11 } @@ -75,6 +94,23 @@ ) end + it "stringifies built-in user attributes in index event" do + @ep = subject.new("sdk_key", default_config, hc) + flag = { key: "flagkey", version: 11 } + fe = { + kind: "feature", key: "flagkey", version: 11, user: numeric_user, + variation: 1, value: "value", trackEvents: true + } + @ep.add_event(fe) + + output = flush_and_get_events + expect(output).to contain_exactly( + eq(index_event(fe, stringified_numeric_user)), + eq(feature_event(fe, flag, false, nil)), + include(:kind => "summary") + ) + end + it "can include inline user in feature event" do config = LaunchDarkly::Config.new(inline_users_in_events: true) @ep = subject.new("sdk_key", config, hc) @@ -92,6 +128,23 @@ ) end + it "stringifies built-in user attributes in feature event" do + config = LaunchDarkly::Config.new(inline_users_in_events: true) + @ep = subject.new("sdk_key", config, hc) + flag = { key: "flagkey", version: 11 } + fe = { + kind: "feature", key: "flagkey", version: 11, user: numeric_user, + variation: 1, value: "value", trackEvents: true + } + @ep.add_event(fe) + + output = flush_and_get_events + expect(output).to contain_exactly( + eq(feature_event(fe, flag, false, stringified_numeric_user)), + include(:kind => "summary") + ) + end + it "filters user in feature event" do config = LaunchDarkly::Config.new(all_attributes_private: true, inline_users_in_events: true) @ep = subject.new("sdk_key", config, hc) @@ -323,6 +376,18 @@ ) end + it "stringifies built-in user attributes in custom event" do + config = LaunchDarkly::Config.new(inline_users_in_events: true) + @ep = subject.new("sdk_key", config, hc) + e = { kind: "custom", key: "eventkey", user: numeric_user } + @ep.add_event(e) + + output = flush_and_get_events + expect(output).to contain_exactly( + eq(custom_event(e, stringified_numeric_user)) + ) + end + it "does a final flush when shutting down" do @ep = subject.new("sdk_key", default_config, hc) e = { kind: "identify", key: user[:key], user: user } diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 6f530610..86cb5be5 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -25,22 +25,6 @@ } } end - let(:numeric_key_user) do - { - key: 33, - custom: { - groups: [ "microsoft", "google" ] - } - } - end - let(:sanitized_numeric_key_user) do - { - key: "33", - custom: { - groups: [ "microsoft", "google" ] - } - } - end let(:user_without_key) do { name: "Keyless Joe" } end @@ -354,11 +338,6 @@ def event_processor client.track("custom_event_name", user, 42) end - it "sanitizes the user in the event" do - expect(event_processor).to receive(:add_event).with(hash_including(user: sanitized_numeric_key_user)) - client.track("custom_event_name", numeric_key_user, nil) - end - it "does not send an event, and logs a warning, if user is nil" do expect(event_processor).not_to receive(:add_event) expect(logger).to receive(:warn) @@ -378,11 +357,6 @@ def event_processor 
client.identify(user) end - it "sanitizes the user in the event" do - expect(event_processor).to receive(:add_event).with(hash_including(user: sanitized_numeric_key_user)) - client.identify(numeric_key_user) - end - it "does not send an event, and logs a warning, if user is nil" do expect(event_processor).not_to receive(:add_event) expect(logger).to receive(:warn) From da15bdd62515fb5ae47c23fd67c7956073465e23 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 28 Mar 2019 16:15:56 -0700 Subject: [PATCH 120/182] make const names consistent --- lib/ldclient-rb/evaluation.rb | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index 112aa975..7edef6b2 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -189,7 +189,7 @@ def self.comparator(converter) # Used internally to hold an evaluation result and the events that were generated from prerequisites. EvalResult = Struct.new(:detail, :events) - ATTRS_TO_SANITIZE_FOR_EVALUATION = [ :key, :secondary ] + USER_ATTRS_TO_STRINGIFY_FOR_EVALUATION = [ :key, :secondary ] # Currently we are not stringifying the rest of the built-in attributes prior to evaluation, only for events. # This is because it could affect evaluation results for existing users (ch35206). @@ -204,7 +204,7 @@ def evaluate(flag, user, store, logger) return EvalResult.new(error_result('USER_NOT_SPECIFIED'), []) end - sanitized_user = Util.stringify_attrs(user, ATTRS_TO_SANITIZE_FOR_EVALUATION) + sanitized_user = Util.stringify_attrs(user, USER_ATTRS_TO_STRINGIFY_FOR_EVALUATION) events = [] detail = eval_internal(flag, sanitized_user, store, events, logger) From 260bd1b8cd75c110dd3f719fb907d255d90a0747 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 13 Apr 2019 16:16:43 -0700 Subject: [PATCH 121/182] support metric value with track() --- lib/ldclient-rb/events.rb | 1 + lib/ldclient-rb/impl/event_factory.rb | 10 ++++++---- lib/ldclient-rb/ldclient.rb | 7 ++++--- spec/events_spec.rb | 3 ++- spec/ldclient_spec.rb | 6 ++++++ 5 files changed, 19 insertions(+), 8 deletions(-) diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index c45a9da2..22fdd38c 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -411,6 +411,7 @@ def make_output_event(event) else out[:userKey] = event[:user].nil? ? nil : event[:user][:key] end + out[:metricValue] = event[:metricValue] if event.has_key?(:metricValue) out when "index" { diff --git a/lib/ldclient-rb/impl/event_factory.rb b/lib/ldclient-rb/impl/event_factory.rb index a43f6a33..2e7d2697 100644 --- a/lib/ldclient-rb/impl/event_factory.rb +++ b/lib/ldclient-rb/impl/event_factory.rb @@ -66,13 +66,15 @@ def new_identify_event(user) } end - def new_custom_event(event_name, user, data) - { + def new_custom_event(event_name, user, data, metric_value) + e = { kind: 'custom', key: event_name, - user: user, - data: data + user: user } + e[:data] = data if !data.nil? + e[:metricValue] = metric_value if !metric_value.nil? 
+ e end private diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index bf396827..dc40602c 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -230,12 +230,13 @@ def identify(user) # @param event_name [String] The name of the event # @param user [Hash] The user to register; this can have all the same user properties # described in {#variation} - # @param data [Hash] A hash containing any additional data associated with the event + # @param data [Hash] An optional hash containing any additional data associated with the event + # @param metric_value [Number] An optional numeric value that can be used for analytics purposes # @return [void] # - def track(event_name, user, data) + def track(event_name, user, data = nil, metric_value = nil) sanitize_user(user) - @event_processor.add_event(@event_factory_default.new_custom_event(event_name, user, data)) + @event_processor.add_event(@event_factory_default.new_custom_event(event_name, user, data, metric_value)) end # diff --git a/spec/events_spec.rb b/spec/events_spec.rb index 90b91ec9..31b74b08 100644 --- a/spec/events_spec.rb +++ b/spec/events_spec.rb @@ -289,7 +289,7 @@ it "queues custom event with user" do @ep = subject.new("sdk_key", default_config, hc) - e = { kind: "custom", key: "eventkey", user: user, data: { thing: "stuff" } } + e = { kind: "custom", key: "eventkey", user: user, data: { thing: "stuff" }, metricValue: 1.5 } @ep.add_event(e) output = flush_and_get_events @@ -500,6 +500,7 @@ def custom_event(e, inline_user) else out[:user] = inline_user end + out[:metricValue] = e[:metricValue] if e.has_key?(:metricValue) out end diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 2916861e..c07d4023 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -391,6 +391,12 @@ def event_processor client.track("custom_event_name", user, 42) end + it "can include a metric value" do + expect(event_processor).to receive(:add_event).with(hash_including( + kind: "custom", key: "custom_event_name", user: user, metricValue: 1.5)) + client.track("custom_event_name", user, nil, 1.5) + end + it "sanitizes the user in the event" do expect(event_processor).to receive(:add_event).with(hash_including(user: sanitized_numeric_key_user)) client.track("custom_event_name", numeric_key_user, nil) From 703ffe5ca65e3299a2c7ffbef4baac42ddd08beb Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 16 Apr 2019 18:41:36 -0700 Subject: [PATCH 122/182] update method description --- lib/ldclient-rb/ldclient.rb | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index dc40602c..bd2e1225 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -231,7 +231,10 @@ def identify(user) # @param user [Hash] The user to register; this can have all the same user properties # described in {#variation} # @param data [Hash] An optional hash containing any additional data associated with the event - # @param metric_value [Number] An optional numeric value that can be used for analytics purposes + # @param metric_value [Number] A numeric value used by the LaunchDarkly experimentation + # feature in numeric custom metrics. Can be omitted if this event is used by only + # non-numeric metrics. This field will also be returned as part of the custom event + # for Data Export. 
# @return [void] # def track(event_name, user, data = nil, metric_value = nil) From 4bc671bd7dbaf400441b6a8a7852f8e45f437c42 Mon Sep 17 00:00:00 2001 From: Ben Woskow Date: Mon, 6 May 2019 16:11:43 -0700 Subject: [PATCH 123/182] applying markdown templates and updating repository url references --- CHANGELOG.md | 16 ++--- CONTRIBUTING.md | 39 +++++++++- Gemfile.lock | 4 +- README.md | 169 +++++++------------------------------------- ldclient-rb.gemspec | 2 +- scripts/release.sh | 4 +- 6 files changed, 74 insertions(+), 160 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index af4ffb62..2a4c2269 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,12 +4,12 @@ All notable changes to the LaunchDarkly Ruby SDK will be documented in this file ## [5.5.5] - 2019-03-28 ### Fixed: -- Setting user attributes to non-string values when a string was expected would cause analytics events not to be processed. Also, in the case of the `secondary` attribute, this could cause evaluations to fail for a flag with a percentage rollout. The SDK will now convert attribute values to strings as needed. ([#131](https://github.com/launchdarkly/ruby-client/issues/131)) +- Setting user attributes to non-string values when a string was expected would cause analytics events not to be processed. Also, in the case of the `secondary` attribute, this could cause evaluations to fail for a flag with a percentage rollout. The SDK will now convert attribute values to strings as needed. ([#131](https://github.com/launchdarkly/ruby-server-sdk/issues/131)) ## [5.5.4] - 2019-03-29 ### Fixed: -- Fixed a missing `require` that could sometimes cause a `NameError` to be thrown when starting the client, depending on what other gems were installed. This bug was introduced in version 5.5.3. ([#129](https://github.com/launchdarkly/ruby-client/issues/129)) -- When an analytics event was generated for a feature flag because it is a prerequisite for another flag that was evaluated, the user data was being omitted from the event. ([#128](https://github.com/launchdarkly/ruby-client/issues/128)) +- Fixed a missing `require` that could sometimes cause a `NameError` to be thrown when starting the client, depending on what other gems were installed. This bug was introduced in version 5.5.3. ([#129](https://github.com/launchdarkly/ruby-server-sdk/issues/129)) +- When an analytics event was generated for a feature flag because it is a prerequisite for another flag that was evaluated, the user data was being omitted from the event. ([#128](https://github.com/launchdarkly/ruby-server-sdk/issues/128)) - If `track` or `identify` is called without a user, the SDK now logs a warning, and does not send an analytics event to LaunchDarkly (since it would not be processed without a user). - Added a link from the SDK readme to the guide regarding the client initialization. @@ -44,7 +44,7 @@ All notable changes to the LaunchDarkly Ruby SDK will be documented in this file ### Fixed: - Added or corrected a large number of documentation comments. All API classes and methods are now documented, and internal implementation details have been hidden from the documentation. You can view the latest documentation on [RubyDoc](https://www.rubydoc.info/gems/ldclient-rb). 
- Fixed a problem in the Redis feature store that would only happen under unlikely circumstances: trying to evaluate a flag when the LaunchDarkly client had not yet been fully initialized and the store did not yet have data in it, and then trying again when the client was still not ready but the store _did_ have data (presumably put there by another process). Previously, the second attempt would fail. -- In polling mode, the SDK did not correctly handle non-ASCII Unicode characters in feature flag data. ([#90](https://github.com/launchdarkly/ruby-client/issues/90)) +- In polling mode, the SDK did not correctly handle non-ASCII Unicode characters in feature flag data. ([#90](https://github.com/launchdarkly/ruby-server-sdk/issues/90)) ### Deprecated: - `RedisFeatureStore.new`. This implementation class may be changed or moved in the future; use `LaunchDarkly::Integrations::Redis::new_feature_store`. @@ -52,16 +52,16 @@ All notable changes to the LaunchDarkly Ruby SDK will be documented in this file ## [5.4.3] - 2019-01-11 ### Changed: -- The SDK is now compatible with `net-http-persistent` 3.x. (Thanks, [CodingAnarchy](https://github.com/launchdarkly/ruby-client/pull/113)!) +- The SDK is now compatible with `net-http-persistent` 3.x. (Thanks, [CodingAnarchy](https://github.com/launchdarkly/ruby-server-sdk/pull/113)!) ## [5.4.2] - 2019-01-04 ### Fixed: -- Fixed overly specific dependency versions of `concurrent-ruby` and `semantic`. ([#115](https://github.com/launchdarkly/ruby-client/issues/115)) +- Fixed overly specific dependency versions of `concurrent-ruby` and `semantic`. ([#115](https://github.com/launchdarkly/ruby-server-sdk/issues/115)) - Removed obsolete dependencies on `hashdiff` and `thread_safe`. ## [5.4.1] - 2018-11-05 ### Fixed: -- Fixed a `LoadError` in `file_data_source.rb`, which was added in 5.4.0. (Thanks, [kbarrette](https://github.com/launchdarkly/ruby-client/pull/110)!) +- Fixed a `LoadError` in `file_data_source.rb`, which was added in 5.4.0. (Thanks, [kbarrette](https://github.com/launchdarkly/ruby-server-sdk/pull/110)!) ## [5.4.0] - 2018-11-02 @@ -128,7 +128,7 @@ Fixed a regression in version 5.0.0 that could prevent the client from reconnect ## [3.0.2] - 2018-03-06 ## Fixed -- Improved efficiency of logging by not constructing messages that won't be visible at the current log level. (Thanks, [julik](https://github.com/launchdarkly/ruby-client/pull/98)!) +- Improved efficiency of logging by not constructing messages that won't be visible at the current log level. (Thanks, [julik](https://github.com/launchdarkly/ruby-server-sdk/pull/98)!) ## [3.0.1] - 2018-02-26 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index c6b8dd20..618877f8 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,4 +1,37 @@ -Contributing to LaunchDarkly SDK for Ruby -========================================= +Contributing to the LaunchDarkly Server-side SDK for Ruby +================================================ -We encourage pull-requests and other contributions from the community. We've also published an [SDK contributor's guide](http://docs.launchdarkly.com/docs/sdk-contributors-guide) that provides a detailed explanation of how our SDKs work. +LaunchDarkly has published an [SDK contributor's guide](https://docs.launchdarkly.com/docs/sdk-contributors-guide) that provides a detailed explanation of how our SDKs work. See below for additional information on how to contribute to this SDK. 
+ +Submitting bug reports and feature requests +------------------ + +The LaunchDarkly SDK team monitors the [issue tracker](https://github.com/launchdarkly/ruby-server-sdk/issues) in the SDK repository. Bug reports and feature requests specific to this SDK should be filed in this issue tracker. The SDK team will respond to all newly filed issues within two business days. + +Submitting pull requests +------------------ + +We encourage pull requests and other contributions from the community. Before submitting pull requests, ensure that all temporary or unintended code is removed. Don't worry about adding reviewers to the pull request; the LaunchDarkly SDK team will add themselves. The SDK team will acknowledge all pull requests within two business days. + +Build instructions +------------------ + +### Prerequisites + +This SDK is built with [Bundler](https://bundler.io/). To install Bundler, run `gem install bundler -v 1.17.3`. You might need `sudo` to execute the command successfully. As of this writing, the SDK does not support being built with Bundler 2.0. + +### Building + +To build the SDK without running any tests: + +``` +bundle install +``` + +### Testing + +To run all unit tests: + +``` +bundle exec rspec spec +``` \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index 21a65cc1..aa131e55 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,7 +1,7 @@ PATH remote: . specs: - ldclient-rb (5.5.2) + ldclient-rb (5.5.5) concurrent-ruby (~> 1.0) json (>= 1.8, < 3) ld-eventsource (~> 1.0) @@ -23,7 +23,7 @@ GEM aws-sigv4 (1.0.3) codeclimate-test-reporter (0.6.0) simplecov (>= 0.7.1, < 1.0.0) - concurrent-ruby (1.1.4) + concurrent-ruby (1.1.5) connection_pool (2.2.1) diff-lcs (1.3) diplomat (2.0.2) diff --git a/README.md b/README.md index 1c3eaa8a..0fc5a9d2 100644 --- a/README.md +++ b/README.md @@ -1,135 +1,27 @@ -LaunchDarkly SDK for Ruby +LaunchDarkly Server-side SDK for Ruby =========================== [![Gem Version](https://badge.fury.io/rb/ldclient-rb.svg)](http://badge.fury.io/rb/ldclient-rb) -[![Circle CI](https://circleci.com/gh/launchdarkly/ruby-client/tree/master.svg?style=svg)](https://circleci.com/gh/launchdarkly/ruby-client/tree/master) -[![Test Coverage](https://codeclimate.com/github/launchdarkly/ruby-client/badges/coverage.svg)](https://codeclimate.com/github/launchdarkly/ruby-client/coverage) -[![security](https://hakiri.io/github/launchdarkly/ruby-client/master.svg)](https://hakiri.io/github/launchdarkly/ruby-client/master) +[![Circle CI](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/master.svg?style=svg)](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/master) +[![Test Coverage](https://codeclimate.com/github/launchdarkly/ruby-server-sdk/badges/coverage.svg)](https://codeclimate.com/github/launchdarkly/ruby-server-sdk/coverage) +[![security](https://hakiri.io/github/launchdarkly/ruby-server-sdk/master.svg)](https://hakiri.io/github/launchdarkly/ruby-server-sdk/master) + +LaunchDarkly overview +------------------------- +[LaunchDarkly](https://www.launchdarkly.com) is a feature management platform that serves over 100 billion feature flags daily to help teams build better software, faster. [Get started](https://docs.launchdarkly.com/docs/getting-started) using LaunchDarkly today! 
+ +[![Twitter Follow](https://img.shields.io/twitter/follow/launchdarkly.svg?style=social&label=Follow&maxAge=2592000)](https://twitter.com/intent/follow?screen_name=launchdarkly) Supported Ruby versions ----------------------- This version of the LaunchDarkly SDK has a minimum Ruby version of 2.2.6, or 9.1.6 for JRuby. -Quick setup +Getting started ----------- -1. Install the Ruby SDK with `gem` - -```shell -gem install ldclient-rb -``` - -2. Require the LaunchDarkly client: - -```ruby -require 'ldclient-rb' -``` - -3. Create a new LDClient with your SDK key: - -```ruby -client = LaunchDarkly::LDClient.new("your_sdk_key") -``` - -*NOTE: Please refer to [our documentation](https://docs.launchdarkly.com/docs/ruby-sdk-reference#section-initializing-ldclient-using-spring-unicorn-or-puma) for additional instructions on how to use LaunchDarkly with [Spring](https://github.com/rails/spring), [Unicorn](https://bogomips.org/unicorn/), or [Puma](https://github.com/puma/puma).* - -### Ruby on Rails - -1. Add `gem 'ldclient-rb'` to your Gemfile and `bundle install` - -2. Initialize the launchdarkly client in `config/initializers/launchdarkly.rb`: - -```ruby -Rails.configuration.ld_client = LaunchDarkly::LDClient.new("your_sdk_key") -``` - -3. You may want to include a function in your ApplicationController - -```ruby -def launchdarkly_settings - if current_user.present? - { - key: current_user.id, - anonymous: false, - email: current_user.email, - custom: { groups: current_user.groups.pluck(:name) }, - # Any other fields you may have - # e.g. lastName: current_user.last_name, - } - else - if Rails::VERSION::MAJOR <= 3 - hash_key = request.session_options[:id] - else - hash_key = session.id - end - # session ids should be private to prevent session hijacking - hash_key = Digest::SHA256.base64digest hash_key - { - key: hash_key, - anonymous: true, - } - end -end -``` - -4. In your controllers, access the client using - -```ruby -Rails.application.config.ld_client.variation('your.flag.key', launchdarkly_settings, false) -``` - -Note that this gem will automatically switch to using the Rails logger it is detected. - - -Your first feature flag ------------------------ - -1. Create a new feature flag on your [dashboard](https://app.launchdarkly.com). -2. In your application code, use the feature's key to check whether the flag is on for each user: - -```ruby -if client.variation("your.flag.key", {key: "user@test.com"}, false) - # application code to show the feature -else - # the code to run if the feature is off -end -``` - -HTTPS proxy ------------ - -The Ruby SDK uses Faraday and Socketry to handle its network traffic. Both of these provide built-in support for the use of an HTTPS proxy. If the HTTPS_PROXY environment variable is present then the SDK will proxy all network requests through the URL provided. (HTTP_PROXY is not used because all LaunchDarkly services require HTTPS.) 
- -How to set the HTTPS_PROXY environment variable on Mac/Linux systems: -``` -export HTTPS_PROXY=https://web-proxy.domain.com:8080 -``` - -How to set the HTTPS_PROXY environment variable on Windows systems: -``` -set HTTPS_PROXY=https://web-proxy.domain.com:8080 -``` - -If your proxy requires authentication then you can prefix the URN with your login information: -``` -export HTTPS_PROXY=http://user:pass@web-proxy.domain.com:8080 -``` -or -``` -set HTTPS_PROXY=http://user:pass@web-proxy.domain.com:8080 -``` - -Database integrations ---------------------- - -Feature flag data can be kept in a persistent store using Redis, DynamoDB, or Consul. These adapters are implemented in the `LaunchDarkly::Integrations::Redis`, `LaunchDarkly::Integrations::DynamoDB`, and `LaunchDarkly::Integrations::Consul` modules; to use them, call the `new_feature_store` method in the module, and put the returned object in the `feature_store` property of your client configuration. See the [API documentation](https://www.rubydoc.info/gems/ldclient-rb/LaunchDarkly/Integrations) and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store) for more information. - -Using flag data from a file ---------------------------- - -For testing purposes, the SDK can be made to read feature flag state from a file or files instead of connecting to LaunchDarkly. See `LaunchDarkly::FileDataSource` or the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/reading-flags-from-a-file) for more details. +Refer to the [SDK documentation](https://docs.launchdarkly.com/docs/ruby-sdk-reference#section-getting-started) for instructions on getting started with using the SDK. Learn more ----------- @@ -140,37 +32,26 @@ Generated API documentation is on [RubyDoc.info](https://www.rubydoc.info/gems/l Testing ------- - + We run integration tests for all our SDKs using a centralized test harness. This approach gives us the ability to test for consistency across SDKs, as well as test networking behavior in a long-running application. These tests cover each method in the SDK, and verify that event sending, flag evaluation, stream reconnection, and other aspects of the SDK all behave correctly. - + Contributing ------------ - -See [Contributing](https://github.com/launchdarkly/ruby-client/blob/master/CONTRIBUTING.md). - + +We encourage pull requests and other contributions from the community. Check out our [contributing guidelines](CONTRIBUTING.md) for instructions on how to contribute to this SDK. + About LaunchDarkly ------------------- - +----------- + * LaunchDarkly is a continuous delivery platform that provides feature flags as a service and allows developers to iterate quickly and safely. We allow you to easily flag your features and manage them from the LaunchDarkly dashboard. With LaunchDarkly, you can: * Roll out a new feature to a subset of your users (like a group of users who opt-in to a beta tester group), gathering feedback and bug reports from real-world use cases. * Gradually roll out a feature to an increasing percentage of users, and track the effect that the feature has on key metrics (for instance, how likely is a user to complete a purchase if they have feature A versus feature B?). * Turn off a feature that you realize is causing performance problems in production, without needing to re-deploy, or even restart the application with a changed configuration file. 
* Grant access to certain features based on user attributes, like payment plan (eg: users on the ‘gold’ plan get access to more features than users in the ‘silver’ plan). Disable parts of your application to facilitate maintenance, without taking everything offline. -* LaunchDarkly provides feature flag SDKs for - * [Java](http://docs.launchdarkly.com/docs/java-sdk-reference "Java SDK") - * [JavaScript](http://docs.launchdarkly.com/docs/js-sdk-reference "LaunchDarkly JavaScript SDK") - * [PHP](http://docs.launchdarkly.com/docs/php-sdk-reference "LaunchDarkly PHP SDK") - * [Python](http://docs.launchdarkly.com/docs/python-sdk-reference "LaunchDarkly Python SDK") - * [Go](http://docs.launchdarkly.com/docs/go-sdk-reference "LaunchDarkly Go SDK") - * [Node.JS](http://docs.launchdarkly.com/docs/node-sdk-reference "LaunchDarkly Node SDK") - * [Electron](http://docs.launchdarkly.com/docs/electron-sdk-reference "LaunchDarkly Electron SDK") - * [.NET](http://docs.launchdarkly.com/docs/dotnet-sdk-reference "LaunchDarkly .Net SDK") - * [Ruby](http://docs.launchdarkly.com/docs/ruby-sdk-reference "LaunchDarkly Ruby SDK") - * [iOS](http://docs.launchdarkly.com/docs/ios-sdk-reference "LaunchDarkly iOS SDK") - * [Android](http://docs.launchdarkly.com/docs/android-sdk-reference "LaunchDarkly Android SDK") +* LaunchDarkly provides feature flag SDKs for a wide variety of languages and technologies. Check out [our documentation](https://docs.launchdarkly.com/docs) for a complete list. * Explore LaunchDarkly - * [launchdarkly.com](http://www.launchdarkly.com/ "LaunchDarkly Main Website") for more information - * [docs.launchdarkly.com](http://docs.launchdarkly.com/ "LaunchDarkly Documentation") for our documentation and SDKs - * [apidocs.launchdarkly.com](http://apidocs.launchdarkly.com/ "LaunchDarkly API Documentation") for our API documentation - * [blog.launchdarkly.com](http://blog.launchdarkly.com/ "LaunchDarkly Blog Documentation") for the latest product updates - * [Feature Flagging Guide](https://github.com/launchdarkly/featureflags/ "Feature Flagging Guide") for best practices and strategies + * [launchdarkly.com](https://www.launchdarkly.com/ "LaunchDarkly Main Website") for more information + * [docs.launchdarkly.com](https://docs.launchdarkly.com/ "LaunchDarkly Documentation") for our documentation and SDK reference guides + * [apidocs.launchdarkly.com](https://apidocs.launchdarkly.com/ "LaunchDarkly API Documentation") for our API documentation + * [blog.launchdarkly.com](https://blog.launchdarkly.com/ "LaunchDarkly Blog Documentation") for the latest product updates + * [Feature Flagging Guide](https://github.com/launchdarkly/featureflags/ "Feature Flagging Guide") for best practices and strategies \ No newline at end of file diff --git a/ldclient-rb.gemspec b/ldclient-rb.gemspec index 9fb4daa0..d1a19483 100644 --- a/ldclient-rb.gemspec +++ b/ldclient-rb.gemspec @@ -12,7 +12,7 @@ Gem::Specification.new do |spec| spec.email = ["team@launchdarkly.com"] spec.summary = "LaunchDarkly SDK for Ruby" spec.description = "Official LaunchDarkly SDK for Ruby" - spec.homepage = "https://github.com/launchdarkly/ruby-client" + spec.homepage = "https://github.com/launchdarkly/ruby-server-sdk" spec.license = "Apache-2.0" spec.files = `git ls-files -z`.split("\x0") diff --git a/scripts/release.sh b/scripts/release.sh index 18537846..314fe8b9 100755 --- a/scripts/release.sh +++ b/scripts/release.sh @@ -9,7 +9,7 @@ # When done you should commit and push the changes made. 
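The "Database integrations" paragraph in the README section above wires a persistent store through the module's `new_feature_store` method and the `feature_store` config property. A rough sketch using the Redis adapter; the option names (`redis_url`, `prefix`) and the connection string are assumptions here, not something these patches establish:

```ruby
# Rough sketch only: point the client at a Redis-backed feature store.
# Assumes the redis gem is installed and a local Redis instance is running.
require "ldclient-rb"

store = LaunchDarkly::Integrations::Redis.new_feature_store(
  redis_url: "redis://localhost:6379/0", # placeholder URL
  prefix: "launchdarkly"                 # placeholder key prefix
)
config = LaunchDarkly::Config.new(feature_store: store)
client = LaunchDarkly::LDClient.new("your-sdk-key", config)
```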
set -uxe -echo "Starting ruby-client release." +echo "Starting ruby-server-sdk release." VERSION=$1 @@ -24,4 +24,4 @@ gem build ldclient-rb.gemspec # Publish Ruby Gem gem push ldclient-rb-${VERSION}.gem -echo "Done with ruby-client release" \ No newline at end of file +echo "Done with ruby-server-sdk release" \ No newline at end of file From a56ee201360abd3a9c3ad9cb55cc6a7c9d493fef Mon Sep 17 00:00:00 2001 From: Ben Woskow Date: Tue, 7 May 2019 12:55:19 -0700 Subject: [PATCH 124/182] Cleaning up markdown files --- CONTRIBUTING.md | 4 +--- README.md | 3 +-- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 618877f8..6ed90ddb 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -20,9 +20,7 @@ Build instructions This SDK is built with [Bundler](https://bundler.io/). To install Bundler, run `gem install bundler -v 1.17.3`. You might need `sudo` to execute the command successfully. As of this writing, the SDK does not support being built with Bundler 2.0. -### Building - -To build the SDK without running any tests: +To install the runtime dependencies: ``` bundle install diff --git a/README.md b/README.md index 0fc5a9d2..7795ddb9 100644 --- a/README.md +++ b/README.md @@ -4,8 +4,7 @@ LaunchDarkly Server-side SDK for Ruby [![Gem Version](https://badge.fury.io/rb/ldclient-rb.svg)](http://badge.fury.io/rb/ldclient-rb) [![Circle CI](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/master.svg?style=svg)](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/master) -[![Test Coverage](https://codeclimate.com/github/launchdarkly/ruby-server-sdk/badges/coverage.svg)](https://codeclimate.com/github/launchdarkly/ruby-server-sdk/coverage) -[![security](https://hakiri.io/github/launchdarkly/ruby-server-sdk/master.svg)](https://hakiri.io/github/launchdarkly/ruby-server-sdk/master) +[![Security](https://hakiri.io/github/launchdarkly/ruby-server-sdk/master.svg)](https://hakiri.io/github/launchdarkly/ruby-server-sdk/master) LaunchDarkly overview ------------------------- From 962e729b88f81efdf61d3905aa2a888bfed6e8e7 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 8 May 2019 18:01:29 -0700 Subject: [PATCH 125/182] allow skipping database tests --- CONTRIBUTING.md | 4 +++- spec/integrations/consul_feature_store_spec.rb | 1 + spec/integrations/dynamodb_feature_store_spec.rb | 3 ++- spec/redis_feature_store_spec.rb | 2 ++ 4 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 6ed90ddb..ac126eec 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -32,4 +32,6 @@ To run all unit tests: ``` bundle exec rspec spec -``` \ No newline at end of file +``` + +By default, the full unit test suite includes live tests of the integrations for Consul, DynamoDB, and Redis. Those tests expect you to have instances of all of those databases running locally. To skip them, set the environment variable `LD_SKIP_DATABASE_TESTS=1` before running the tests. diff --git a/spec/integrations/consul_feature_store_spec.rb b/spec/integrations/consul_feature_store_spec.rb index 13767686..45f87097 100644 --- a/spec/integrations/consul_feature_store_spec.rb +++ b/spec/integrations/consul_feature_store_spec.rb @@ -28,6 +28,7 @@ def clear_all_data describe "Consul feature store" do + return if ENV['LD_SKIP_DATABASE_TESTS'] == '1' # These tests will all fail if there isn't a local Consul instance running. 
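Combining the CONTRIBUTING.md test command with the environment variable added above, a run that skips the live Consul, DynamoDB, and Redis integration tests would look like this:

```
LD_SKIP_DATABASE_TESTS=1 bundle exec rspec spec
```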
diff --git a/spec/integrations/dynamodb_feature_store_spec.rb b/spec/integrations/dynamodb_feature_store_spec.rb index 4add3d53..d924b30a 100644 --- a/spec/integrations/dynamodb_feature_store_spec.rb +++ b/spec/integrations/dynamodb_feature_store_spec.rb @@ -89,7 +89,8 @@ def create_test_client describe "DynamoDB feature store" do - + return if ENV['LD_SKIP_DATABASE_TESTS'] == '1' + # These tests will all fail if there isn't a local DynamoDB instance running. create_table_if_necessary diff --git a/spec/redis_feature_store_spec.rb b/spec/redis_feature_store_spec.rb index 3da25f4f..0f372184 100644 --- a/spec/redis_feature_store_spec.rb +++ b/spec/redis_feature_store_spec.rb @@ -31,6 +31,8 @@ def clear_all_data describe LaunchDarkly::RedisFeatureStore do subject { LaunchDarkly::RedisFeatureStore } + return if ENV['LD_SKIP_DATABASE_TESTS'] == '1' + # These tests will all fail if there isn't a Redis instance running on the default port. context "real Redis with local cache" do From f32b9c694b8ceb6f761fd068d085b2cce05200fc Mon Sep 17 00:00:00 2001 From: Ben Woskow <48036130+bwoskow-ld@users.noreply.github.com> Date: Mon, 13 May 2019 13:28:36 -0700 Subject: [PATCH 126/182] Updating the package name (#115) * update package name * missed one * revert module entry point name change --- Gemfile.lock | 4 ++-- README.md | 4 ++-- ldclient-rb.gemspec => launchdarkly-server-sdk.gemspec | 2 +- scripts/gendocs.sh | 2 +- scripts/release.sh | 8 ++++---- 5 files changed, 10 insertions(+), 10 deletions(-) rename ldclient-rb.gemspec => launchdarkly-server-sdk.gemspec (97%) diff --git a/Gemfile.lock b/Gemfile.lock index aa131e55..a076f848 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,7 +1,7 @@ PATH remote: . specs: - ldclient-rb (5.5.5) + launchdarkly-server-sdk (5.5.6) concurrent-ruby (~> 1.0) json (>= 1.8, < 3) ld-eventsource (~> 1.0) @@ -90,7 +90,7 @@ DEPENDENCIES codeclimate-test-reporter (~> 0) connection_pool (>= 2.1.2) diplomat (>= 2.0.2) - ldclient-rb! + launchdarkly-server-sdk! listen (~> 3.0) rake (~> 10.0) redis (~> 3.3.5) diff --git a/README.md b/README.md index 7795ddb9..d3f99b69 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ LaunchDarkly Server-side SDK for Ruby =========================== -[![Gem Version](https://badge.fury.io/rb/ldclient-rb.svg)](http://badge.fury.io/rb/ldclient-rb) +[![Gem Version](https://badge.fury.io/rb/launchdarkly-server-sdk.svg)](http://badge.fury.io/rb/launchdarkly-server-sdk) [![Circle CI](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/master.svg?style=svg)](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/master) [![Security](https://hakiri.io/github/launchdarkly/ruby-server-sdk/master.svg)](https://hakiri.io/github/launchdarkly/ruby-server-sdk/master) @@ -27,7 +27,7 @@ Learn more Check out our [documentation](http://docs.launchdarkly.com) for in-depth instructions on configuring and using LaunchDarkly. You can also head straight to the [reference guide for this SDK](http://docs.launchdarkly.com/docs/ruby-sdk-reference). -Generated API documentation is on [RubyDoc.info](https://www.rubydoc.info/gems/ldclient-rb). +Generated API documentation is on [RubyDoc.info](https://www.rubydoc.info/gems/launchdarkly-server-sdk). 
Testing ------- diff --git a/ldclient-rb.gemspec b/launchdarkly-server-sdk.gemspec similarity index 97% rename from ldclient-rb.gemspec rename to launchdarkly-server-sdk.gemspec index d1a19483..911e438b 100644 --- a/ldclient-rb.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -6,7 +6,7 @@ require "ldclient-rb/version" # rubocop:disable Metrics/BlockLength Gem::Specification.new do |spec| - spec.name = "ldclient-rb" + spec.name = "launchdarkly-server-sdk" spec.version = LaunchDarkly::VERSION spec.authors = ["LaunchDarkly"] spec.email = ["team@launchdarkly.com"] diff --git a/scripts/gendocs.sh b/scripts/gendocs.sh index 96df177f..c5ec7dcf 100755 --- a/scripts/gendocs.sh +++ b/scripts/gendocs.sh @@ -1,7 +1,7 @@ #!/bin/bash # Use this script to generate documentation locally in ./doc so it can be proofed before release. -# After release, documentation will be visible at https://www.rubydoc.info/gems/ldclient-rb +# After release, documentation will be visible at https://www.rubydoc.info/gems/launchdarkly-server-sdk gem install --conservative yard gem install --conservative redcarpet # provides Markdown formatting diff --git a/scripts/release.sh b/scripts/release.sh index 314fe8b9..9813240c 100755 --- a/scripts/release.sh +++ b/scripts/release.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -# This script updates the version for the ldclient library and releases it to RubyGems +# This script updates the version for the launchdarkly-server-sdk library and releases it to RubyGems # It will only work if you have the proper credentials set up in ~/.gem/credentials # It takes exactly one argument: the new version. @@ -13,15 +13,15 @@ echo "Starting ruby-server-sdk release." VERSION=$1 -#Update version in ldclient/version.py +#Update version in lib/ldclient-rb/version.rb VERSION_RB_TEMP=./version.rb.tmp sed "s/VERSION =.*/VERSION = \"${VERSION}\"/g" lib/ldclient-rb/version.rb > ${VERSION_RB_TEMP} mv ${VERSION_RB_TEMP} lib/ldclient-rb/version.rb # Build Ruby Gem -gem build ldclient-rb.gemspec +gem build launchdarkly-server-sdk.gemspec # Publish Ruby Gem -gem push ldclient-rb-${VERSION}.gem +gem push launchdarkly-server-sdk-${VERSION}.gem echo "Done with ruby-server-sdk release" \ No newline at end of file From 8defb308a44a25f056fcd7260393e25e8a277dbe Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 10 Jul 2019 17:49:38 -0700 Subject: [PATCH 127/182] bump ld-eventsource version for stream logging fix --- Gemfile.lock | 6 +++--- launchdarkly-server-sdk.gemspec | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index a076f848..155eccf2 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,10 +1,10 @@ PATH remote: . 
specs: - launchdarkly-server-sdk (5.5.6) + launchdarkly-server-sdk (5.5.7) concurrent-ruby (~> 1.0) json (>= 1.8, < 3) - ld-eventsource (~> 1.0) + ld-eventsource (= 1.0.1) semantic (~> 1.6) GEM @@ -40,7 +40,7 @@ GEM jmespath (1.4.0) json (1.8.6) json (1.8.6-java) - ld-eventsource (1.0.0) + ld-eventsource (1.0.1) concurrent-ruby (~> 1.0) http_tools (~> 0.4.5) socketry (~> 0.5.1) diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index 911e438b..2e95cd41 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -36,5 +36,5 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "json", [">= 1.8", "< 3"] spec.add_runtime_dependency "semantic", "~> 1.6" spec.add_runtime_dependency "concurrent-ruby", "~> 1.0" - spec.add_runtime_dependency "ld-eventsource", '~> 1.0' + spec.add_runtime_dependency "ld-eventsource", "1.0.1" end From c1aeaa3a8f35c7fb375507c4886b6e874e36ef69 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 24 Jul 2019 11:49:41 -0700 Subject: [PATCH 128/182] use YAML.safe_load --- lib/ldclient-rb/file_data_source.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 7606c1d3..d5e05ae0 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -206,7 +206,7 @@ def parse_content(content) # We can use the Ruby YAML parser for both YAML and JSON (JSON is a subset of YAML and while # not all YAML parsers handle it correctly, we have verified that the Ruby one does, at least # for all the samples of actual flag data that we've tested). - symbolize_all_keys(YAML.load(content)) + symbolize_all_keys(YAML.safe_load(content)) end def symbolize_all_keys(value) From f8aac44e2b4b0b2022ec845cd8eef68b57e3b8cd Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 24 Jul 2019 12:25:34 -0700 Subject: [PATCH 129/182] add unit test and temporarily revert fix to demonstrate failure --- lib/ldclient-rb/file_data_source.rb | 2 +- spec/file_data_source_spec.rb | 28 ++++++++++++++++++++++++++++ 2 files changed, 29 insertions(+), 1 deletion(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index d5e05ae0..7606c1d3 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -206,7 +206,7 @@ def parse_content(content) # We can use the Ruby YAML parser for both YAML and JSON (JSON is a subset of YAML and while # not all YAML parsers handle it correctly, we have verified that the Ruby one does, at least # for all the samples of actual flag data that we've tested). 
- symbolize_all_keys(YAML.safe_load(content)) + symbolize_all_keys(YAML.load(content)) end def symbolize_all_keys(value) diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index c827222d..837b775d 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -1,6 +1,14 @@ require "spec_helper" require "tempfile" +# see does not allow Ruby objects in YAML" for the purpose of the following two things +$created_bad_class = false +class BadClassWeShouldNotInstantiate < Hash + def []=(key, value) + $created_bad_class = true + end +end + describe LaunchDarkly::FileDataSource do let(:full_flag_1_key) { "flag1" } let(:full_flag_1_value) { "on" } @@ -78,6 +86,12 @@ EOF } + let(:unsafe_yaml) { <<-EOF +--- !ruby/hash:BadClassWeShouldNotInstantiate +foo: bar +EOF + } + let(:bad_file_path) { "no-such-file" } before do @@ -138,6 +152,20 @@ def with_data_source(options) end end + it "does not allow Ruby objects in YAML" do + # This tests for the vulnerability described here: https://trailofbits.github.io/rubysec/yaml/index.html + # The file we're loading contains a hash with a custom Ruby class, BadClassWeShouldNotInstantiate (see top + # of file). If we're not loading in safe mode, it will create an instance of that class and call its []= + # method, which we've defined to set $created_bad_class to true. In safe mode, it refuses to parse this file. + file = make_temp_file(unsafe_yaml) + with_data_source({ paths: [file.path ] }) do |ds| + event = ds.start + expect(event.set?).to eq(true) + expect(ds.initialized?).to eq(false) + expect($created_bad_class).to eq(false) + end + end + it "sets start event and initialized on successful load" do file = make_temp_file(all_properties_json) with_data_source({ paths: [ file.path ] }) do |ds| From 8f480604b3b96f7bc3070bf49996f23078a871f2 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 24 Jul 2019 12:27:52 -0700 Subject: [PATCH 130/182] restore fix --- lib/ldclient-rb/file_data_source.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 7606c1d3..d5e05ae0 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -206,7 +206,7 @@ def parse_content(content) # We can use the Ruby YAML parser for both YAML and JSON (JSON is a subset of YAML and while # not all YAML parsers handle it correctly, we have verified that the Ruby one does, at least # for all the samples of actual flag data that we've tested). - symbolize_all_keys(YAML.load(content)) + symbolize_all_keys(YAML.safe_load(content)) end def symbolize_all_keys(value) From 8fa005fee2d0800c5da76e745d82e0476cadd6af Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 24 Jul 2019 13:08:50 -0700 Subject: [PATCH 131/182] add comment about not using FileDataSource in production --- lib/ldclient-rb/file_data_source.rb | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index d5e05ae0..cfea75f7 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -21,9 +21,11 @@ def self.have_listen? end # - # Provides a way to use local files as a source of feature flag state. This would typically be - # used in a test environment, to operate using a predetermined feature flag state without an - # actual LaunchDarkly connection. + # Provides a way to use local files as a source of feature flag state. 
This allows using a + # predetermined feature flag state without an actual LaunchDarkly connection. + # + # Reading flags from a file is only intended for pre-production environments. Production + # environments should always be configured to receive flag updates from LaunchDarkly. # # To use this component, call {FileDataSource#factory}, and store its return value in the # {Config#data_source} property of your LaunchDarkly client configuration. In the options From 963e4ebadf716b5014dc9f60e8fa0c445bd07a9b Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 24 Jul 2019 20:31:58 -0700 Subject: [PATCH 132/182] drop events if inbox is full --- lib/ldclient-rb/events.rb | 87 ++++++++++++++++++++++++++++----------- 1 file changed, 62 insertions(+), 25 deletions(-) diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index 69563572..f57287a4 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -4,6 +4,23 @@ require "thread" require "time" +# +# Analytics event processing in the SDK involves several components. The purpose of this design is to +# minimize overhead on the application threads that are generating analytics events. +# +# EventProcessor receives an analytics event from the SDK client, on an application thread. It places +# the event in a bounded queue, the "inbox", and immediately returns. +# +# On a separate worker thread, EventDispatcher consumes events from the inbox. These are considered +# "input events" because they may or may not actually be sent to LaunchDarkly; most flag evaluation +# events are not sent, but are counted and the counters become part of a single summary event. +# EventDispatcher updates those counters, creates "index" events for any users that have not been seen +# recently, and places any events that will be sent to LaunchDarkly into the "outbox" queue. +# +# When it is time to flush events to LaunchDarkly, the contents of the outbox are handed off to +# another worker thread which sends the HTTP request. +# + module LaunchDarkly MAX_FLUSH_WORKERS = 5 CURRENT_SCHEMA_VERSION = 3 @@ -68,28 +85,30 @@ class StopMessage < SynchronousMessage # @private class EventProcessor def initialize(sdk_key, config, client = nil) - @queue = Queue.new + @logger = config.logger + @inbox = SizedQueue.new(config.capacity) @flush_task = Concurrent::TimerTask.new(execution_interval: config.flush_interval) do - @queue << FlushMessage.new + post_to_inbox(FlushMessage.new) end @flush_task.execute @users_flush_task = Concurrent::TimerTask.new(execution_interval: config.user_keys_flush_interval) do - @queue << FlushUsersMessage.new + post_to_inbox(FlushUsersMessage.new) end @users_flush_task.execute @stopped = Concurrent::AtomicBoolean.new(false) - - EventDispatcher.new(@queue, sdk_key, config, client) + @inbox_full = Concurrent::AtomicBoolean.new(false) + + EventDispatcher.new(@inbox, sdk_key, config, client) end def add_event(event) event[:creationDate] = (Time.now.to_f * 1000).to_i - @queue << EventMessage.new(event) + post_to_inbox(EventMessage.new(event)) end def flush # flush is done asynchronously - @queue << FlushMessage.new + post_to_inbox(FlushMessage.new) end def stop @@ -97,9 +116,11 @@ def stop if @stopped.make_true @flush_task.shutdown @users_flush_task.shutdown - @queue << FlushMessage.new + # Note that here we are not calling post_to_inbox, because we *do* want to wait if the inbox + # is full; an orderly shutdown can't happen unless these messages are received. 
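The `FileDataSource` comment above points at `FileDataSource#factory` and the `Config#data_source` property. A minimal sketch of that wiring; the file path, the SDK key, and the `send_events: false` setting are illustrative assumptions:

```ruby
# Minimal sketch: serve flag data from a local file instead of a LaunchDarkly connection,
# as suggested for pre-production use above. The path is a placeholder.
require "ldclient-rb"

data_source = LaunchDarkly::FileDataSource.factory(paths: ["./flags.yml"])
config = LaunchDarkly::Config.new(data_source: data_source, send_events: false)
client = LaunchDarkly::LDClient.new("sdk-key-not-used-offline", config)
```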
+ @inbox << FlushMessage.new stop_msg = StopMessage.new - @queue << stop_msg + @inbox << stop_msg stop_msg.wait_for_completion end end @@ -107,14 +128,30 @@ def stop # exposed only for testing def wait_until_inactive sync_msg = TestSyncMessage.new - @queue << sync_msg + @inbox << sync_msg sync_msg.wait_for_completion end + + private + + def post_to_inbox(message) + begin + @inbox.push(message, non_block=true) + rescue ThreadError + # If the inbox is full, it means the EventDispatcher thread is seriously backed up with not-yet-processed + # events. This is unlikely, but if it happens, it means the application is probably doing a ton of flag + # evaluations across many threads-- so if we wait for a space in the inbox, we risk a very serious slowdown + # of the app. To avoid that, we'll just drop the event. The log warning about this will only be shown once. + if @inbox_full.make_true + @logger.warn { "[LDClient] Events are being produced faster than they can be processed; some events will be dropped" } + end + end + end end # @private class EventDispatcher - def initialize(queue, sdk_key, config, client) + def initialize(inbox, sdk_key, config, client) @sdk_key = sdk_key @config = config @@ -129,10 +166,10 @@ def initialize(queue, sdk_key, config, client) @disabled = Concurrent::AtomicBoolean.new(false) @last_known_past_time = Concurrent::AtomicReference.new(0) - buffer = EventBuffer.new(config.capacity, config.logger) + outbox = EventBuffer.new(config.capacity, config.logger) flush_workers = NonBlockingThreadPool.new(MAX_FLUSH_WORKERS) - Thread.new { main_loop(queue, buffer, flush_workers) } + Thread.new { main_loop(inbox, outbox, flush_workers) } end private @@ -141,16 +178,16 @@ def now_millis() (Time.now.to_f * 1000).to_i end - def main_loop(queue, buffer, flush_workers) + def main_loop(inbox, outbox, flush_workers) running = true while running do begin - message = queue.pop + message = inbox.pop case message when EventMessage - dispatch_event(message.event, buffer) + dispatch_event(message.event, outbox) when FlushMessage - trigger_flush(buffer, flush_workers) + trigger_flush(outbox, flush_workers) when FlushUsersMessage @user_keys.clear when TestSyncMessage @@ -181,11 +218,11 @@ def synchronize_for_testing(flush_workers) flush_workers.wait_all end - def dispatch_event(event, buffer) + def dispatch_event(event, outbox) return if @disabled.value # Always record the event in the summary. - buffer.add_to_summary(event) + outbox.add_to_summary(event) # Decide whether to add the event to the payload. Feature events may be added twice, once for # the event (if tracked) and once for debugging. @@ -205,7 +242,7 @@ def dispatch_event(event, buffer) # an identify event for that user. if !(will_add_full_event && @config.inline_users_in_events) if event.has_key?(:user) && !notice_user(event[:user]) && event[:kind] != "identify" - buffer.add_event({ + outbox.add_event({ kind: "index", creationDate: event[:creationDate], user: event[:user] @@ -213,8 +250,8 @@ def dispatch_event(event, buffer) end end - buffer.add_event(event) if will_add_full_event - buffer.add_event(debug_event) if !debug_event.nil? + outbox.add_event(event) if will_add_full_event + outbox.add_event(debug_event) if !debug_event.nil? end # Add to the set of users we've noticed, and return true if the user was already known to us. 
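The `post_to_inbox` logic above leans on Ruby's `SizedQueue`, which raises `ThreadError` on a non-blocking push when the queue is full, plus a `Concurrent::AtomicBoolean` so the "events are being dropped" warning is only logged once. A stripped-down sketch of that pattern, with illustrative names and capacity:

```ruby
# Illustrative only: a bounded queue that drops work instead of blocking the producer.
require "concurrent"

inbox = SizedQueue.new(3)                         # capacity is a placeholder
inbox_full = Concurrent::AtomicBoolean.new(false)

post = lambda do |message|
  begin
    inbox.push(message, true)                     # non-blocking push
  rescue ThreadError
    # Queue is full: drop the message, warning only on the first drop.
    puts "inbox full; dropping a message" if inbox_full.make_true
  end
end

3.times { |i| post.call(i) }  # these fit within the capacity
post.call(:extra)             # this one is dropped, with a one-time warning
```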
@@ -236,12 +273,12 @@ def should_debug_event(event) end end - def trigger_flush(buffer, flush_workers) + def trigger_flush(outbox, flush_workers) if @disabled.value return end - payload = buffer.get_payload + payload = outbox.get_payload if !payload.events.empty? || !payload.summary.counters.empty? # If all available worker threads are busy, success will be false and no job will be queued. success = flush_workers.post do @@ -252,7 +289,7 @@ def trigger_flush(buffer, flush_workers) Util.log_exception(@config.logger, "Unexpected error in event processor", e) end end - buffer.clear if success # Reset our internal state, these events now belong to the flush worker + outbox.clear if success # Reset our internal state, these events now belong to the flush worker end end From f0581a0120c987f9af5b1e42c09cffe2fb486ac8 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 19 Aug 2019 15:39:48 -0700 Subject: [PATCH 133/182] update doc comment for track with metric_value --- lib/ldclient-rb/ldclient.rb | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index f75c8930..b7c2ee85 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -231,6 +231,11 @@ def identify(user) # Note that event delivery is asynchronous, so the event may not actually be sent # until later; see {#flush}. # + # As of this version’s release date, the LaunchDarkly service does not support the `metricValue` + # parameter. As a result, specifying `metricValue` will not yet produce any different behavior + # from omitting it. Refer to the [SDK reference guide](https://docs.launchdarkly.com/docs/ruby-sdk-reference#section-track) + # for the latest status. + # # @param event_name [String] The name of the event # @param user [Hash] The user to register; this can have all the same user properties # described in {#variation} From 7620721cdee390659cd86bd679c47b3d9781f9e9 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 30 Dec 2019 13:59:17 -0800 Subject: [PATCH 134/182] don't let user fall outside of last bucket in rollout --- lib/ldclient-rb/evaluation.rb | 18 ++++++++---- spec/evaluation_spec.rb | 52 +++++++++++++++++++++++++++++++++++ 2 files changed, 65 insertions(+), 5 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index 43a03c23..d0d2aa38 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -323,20 +323,28 @@ def clause_match_user_no_segments(clause, user) end def variation_index_for_user(flag, rule, user) - if !rule[:variation].nil? # fixed variation - return rule[:variation] - elsif !rule[:rollout].nil? # percentage rollout + variation = rule[:variation] + return variation if !variation.nil? # fixed variation + rollout = rule[:rollout] + return nil if rollout.nil? + variations = rollout[:variations] + if !variations.nil? && variations.length > 0 # percentage rollout rollout = rule[:rollout] bucket_by = rollout[:bucketBy].nil? ? "key" : rollout[:bucketBy] bucket = bucket_user(user, flag[:key], bucket_by, flag[:salt]) sum = 0; - rollout[:variations].each do |variate| + variations.each do |variate| sum += variate[:weight].to_f / 100000.0 if bucket < sum return variate[:variation] end end - nil + # The user's bucket value was greater than or equal to the end of the last bucket. This could happen due + # to a rounding error, or due to the fact that we are scaling to 100000 rather than 99999, or the flag + # data could contain buckets that don't actually add up to 100000. 
Rather than returning an error in + # this case (or changing the scaling, which would potentially change the results for *all* users), we + # will simply put the user in the last bucket. + variations[-1][:variation] else # the rule isn't well-formed nil end diff --git a/spec/evaluation_spec.rb b/spec/evaluation_spec.rb index ff4b63f6..2efbd745 100644 --- a/spec/evaluation_spec.rb +++ b/spec/evaluation_spec.rb @@ -560,6 +560,58 @@ def boolean_flag_with_clauses(clauses) end end + describe "variation_index_for_user" do + it "matches bucket" do + user = { key: "userkey" } + flag_key = "flagkey" + salt = "salt" + + # First verify that with our test inputs, the bucket value will be greater than zero and less than 100000, + # so we can construct a rollout whose second bucket just barely contains that value + bucket_value = (bucket_user(user, flag_key, "key", salt) * 100000).truncate() + expect(bucket_value).to be > 0 + expect(bucket_value).to be < 100000 + + bad_variation_a = 0 + matched_variation = 1 + bad_variation_b = 2 + rule = { + rollout: { + variations: [ + { variation: bad_variation_a, weight: bucket_value }, # end of bucket range is not inclusive, so it will *not* match the target value + { variation: matched_variation, weight: 1 }, # size of this bucket is 1, so it only matches that specific value + { variation: bad_variation_b, weight: 100000 - (bucket_value + 1) } + ] + } + } + flag = { key: flag_key, salt: salt } + + result_variation = variation_index_for_user(flag, rule, user) + expect(result_variation).to be matched_variation + end + + it "uses last bucket if bucket value is equal to total weight" do + user = { key: "userkey" } + flag_key = "flagkey" + salt = "salt" + + bucket_value = (bucket_user(user, flag_key, "key", salt) * 100000).truncate() + + # We'll construct a list of variations that stops right at the target bucket value + rule = { + rollout: { + variations: [ + { variation: 0, weight: bucket_value } + ] + } + } + flag = { key: flag_key, salt: salt } + + result_variation = variation_index_for_user(flag, rule, user) + expect(result_variation).to be 0 + end + end + describe "bucket_user" do it "gets expected bucket values for specific keys" do user = { key: "userKeyA" } From fbf8eb9d58aa34bf4cd21c8c9d24e1e62615c922 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 2 Jan 2020 12:08:44 -0800 Subject: [PATCH 135/182] refactor evaluation logic and move it out of the main namespace --- lib/ldclient-rb.rb | 1 - lib/ldclient-rb/evaluation.rb | 445 ------------ lib/ldclient-rb/evaluation_detail.rb | 80 +++ lib/ldclient-rb/impl/evaluator.rb | 223 ++++++ lib/ldclient-rb/impl/evaluator_bucketing.rb | 32 + lib/ldclient-rb/impl/evaluator_operators.rb | 128 ++++ lib/ldclient-rb/ldclient.rb | 20 +- spec/evaluation_spec.rb | 737 -------------------- spec/impl/evaluator_bucketing_spec.rb | 59 ++ spec/impl/evaluator_operators_spec.rb | 106 +++ spec/impl/evaluator_spec.rb | 600 ++++++++++++++++ 11 files changed, 1240 insertions(+), 1191 deletions(-) delete mode 100644 lib/ldclient-rb/evaluation.rb create mode 100644 lib/ldclient-rb/evaluation_detail.rb create mode 100644 lib/ldclient-rb/impl/evaluator.rb create mode 100644 lib/ldclient-rb/impl/evaluator_bucketing.rb create mode 100644 lib/ldclient-rb/impl/evaluator_operators.rb delete mode 100644 spec/evaluation_spec.rb create mode 100644 spec/impl/evaluator_bucketing_spec.rb create mode 100644 spec/impl/evaluator_operators_spec.rb create mode 100644 spec/impl/evaluator_spec.rb diff --git a/lib/ldclient-rb.rb b/lib/ldclient-rb.rb index 
e5477ecb..9a215686 100644 --- a/lib/ldclient-rb.rb +++ b/lib/ldclient-rb.rb @@ -8,7 +8,6 @@ module LaunchDarkly require "ldclient-rb/version" require "ldclient-rb/interfaces" require "ldclient-rb/util" -require "ldclient-rb/evaluation" require "ldclient-rb/flags_state" require "ldclient-rb/ldclient" require "ldclient-rb/cache_store" diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb deleted file mode 100644 index 43a03c23..00000000 --- a/lib/ldclient-rb/evaluation.rb +++ /dev/null @@ -1,445 +0,0 @@ -require "date" -require "semantic" - -module LaunchDarkly - # An object returned by {LDClient#variation_detail}, combining the result of a flag evaluation with - # an explanation of how it was calculated. - class EvaluationDetail - def initialize(value, variation_index, reason) - @value = value - @variation_index = variation_index - @reason = reason - end - - # - # The result of the flag evaluation. This will be either one of the flag's variations, or the - # default value that was passed to {LDClient#variation_detail}. It is the same as the return - # value of {LDClient#variation}. - # - # @return [Object] - # - attr_reader :value - - # - # The index of the returned value within the flag's list of variations. The first variation is - # 0, the second is 1, etc. This is `nil` if the default value was returned. - # - # @return [int|nil] - # - attr_reader :variation_index - - # - # An object describing the main factor that influenced the flag evaluation value. - # - # This object is currently represented as a Hash, which may have the following keys: - # - # `:kind`: The general category of reason. Possible values: - # - # * `'OFF'`: the flag was off and therefore returned its configured off value - # * `'FALLTHROUGH'`: the flag was on but the user did not match any targets or rules - # * `'TARGET_MATCH'`: the user key was specifically targeted for this flag - # * `'RULE_MATCH'`: the user matched one of the flag's rules - # * `'PREREQUISITE_FAILED`': the flag was considered off because it had at least one - # prerequisite flag that either was off or did not return the desired variation - # * `'ERROR'`: the flag could not be evaluated, so the default value was returned - # - # `:ruleIndex`: If the kind was `RULE_MATCH`, this is the positional index of the - # matched rule (0 for the first rule). - # - # `:ruleId`: If the kind was `RULE_MATCH`, this is the rule's unique identifier. - # - # `:prerequisiteKey`: If the kind was `PREREQUISITE_FAILED`, this is the flag key of - # the prerequisite flag that failed. - # - # `:errorKind`: If the kind was `ERROR`, this indicates the type of error: - # - # * `'CLIENT_NOT_READY'`: the caller tried to evaluate a flag before the client had - # successfully initialized - # * `'FLAG_NOT_FOUND'`: the caller provided a flag key that did not match any known flag - # * `'MALFORMED_FLAG'`: there was an internal inconsistency in the flag data, e.g. a - # rule specified a nonexistent variation - # * `'USER_NOT_SPECIFIED'`: the user object or user key was not provied - # * `'EXCEPTION'`: an unexpected exception stopped flag evaluation - # - # @return [Hash] - # - attr_reader :reason - - # - # Tests whether the flag evaluation returned a default value. This is the same as checking - # whether {#variation_index} is nil. - # - # @return [Boolean] - # - def default_value? - variation_index.nil? 
- end - - def ==(other) - @value == other.value && @variation_index == other.variation_index && @reason == other.reason - end - end - - # @private - module Evaluation - BUILTINS = [:key, :ip, :country, :email, :firstName, :lastName, :avatar, :name, :anonymous] - - NUMERIC_VERSION_COMPONENTS_REGEX = Regexp.new("^[0-9.]*") - - DATE_OPERAND = lambda do |v| - if v.is_a? String - begin - DateTime.rfc3339(v).strftime("%Q").to_i - rescue => e - nil - end - elsif v.is_a? Numeric - v - else - nil - end - end - - SEMVER_OPERAND = lambda do |v| - semver = nil - if v.is_a? String - for _ in 0..2 do - begin - semver = Semantic::Version.new(v) - break # Some versions of jruby cannot properly handle a return here and return from the method that calls this lambda - rescue ArgumentError - v = addZeroVersionComponent(v) - end - end - end - semver - end - - def self.addZeroVersionComponent(v) - NUMERIC_VERSION_COMPONENTS_REGEX.match(v) { |m| - m[0] + ".0" + v[m[0].length..-1] - } - end - - def self.comparator(converter) - lambda do |a, b| - av = converter.call(a) - bv = converter.call(b) - if !av.nil? && !bv.nil? - yield av <=> bv - else - return false - end - end - end - - OPERATORS = { - in: - lambda do |a, b| - a == b - end, - endsWith: - lambda do |a, b| - (a.is_a? String) && (a.end_with? b) - end, - startsWith: - lambda do |a, b| - (a.is_a? String) && (a.start_with? b) - end, - matches: - lambda do |a, b| - (b.is_a? String) && !(Regexp.new b).match(a).nil? - end, - contains: - lambda do |a, b| - (a.is_a? String) && (a.include? b) - end, - lessThan: - lambda do |a, b| - (a.is_a? Numeric) && (a < b) - end, - lessThanOrEqual: - lambda do |a, b| - (a.is_a? Numeric) && (a <= b) - end, - greaterThan: - lambda do |a, b| - (a.is_a? Numeric) && (a > b) - end, - greaterThanOrEqual: - lambda do |a, b| - (a.is_a? Numeric) && (a >= b) - end, - before: - comparator(DATE_OPERAND) { |n| n < 0 }, - after: - comparator(DATE_OPERAND) { |n| n > 0 }, - semVerEqual: - comparator(SEMVER_OPERAND) { |n| n == 0 }, - semVerLessThan: - comparator(SEMVER_OPERAND) { |n| n < 0 }, - semVerGreaterThan: - comparator(SEMVER_OPERAND) { |n| n > 0 }, - segmentMatch: - lambda do |a, b| - false # we should never reach this - instead we special-case this operator in clause_match_user - end - } - - # Used internally to hold an evaluation result and the events that were generated from prerequisites. - EvalResult = Struct.new(:detail, :events) - - USER_ATTRS_TO_STRINGIFY_FOR_EVALUATION = [ :key, :secondary ] - # Currently we are not stringifying the rest of the built-in attributes prior to evaluation, only for events. - # This is because it could affect evaluation results for existing users (ch35206). - - def error_result(errorKind, value = nil) - EvaluationDetail.new(value, nil, { kind: 'ERROR', errorKind: errorKind }) - end - - # Evaluates a feature flag and returns an EvalResult. The result.value will be nil if the flag returns - # the default value. Error conditions produce a result with an error reason, not an exception. - def evaluate(flag, user, store, logger, event_factory) - if user.nil? || user[:key].nil? 
- return EvalResult.new(error_result('USER_NOT_SPECIFIED'), []) - end - - sanitized_user = Util.stringify_attrs(user, USER_ATTRS_TO_STRINGIFY_FOR_EVALUATION) - - events = [] - detail = eval_internal(flag, sanitized_user, store, events, logger, event_factory) - return EvalResult.new(detail, events) - end - - def eval_internal(flag, user, store, events, logger, event_factory) - if !flag[:on] - return get_off_value(flag, { kind: 'OFF' }, logger) - end - - prereq_failure_reason = check_prerequisites(flag, user, store, events, logger, event_factory) - if !prereq_failure_reason.nil? - return get_off_value(flag, prereq_failure_reason, logger) - end - - # Check user target matches - (flag[:targets] || []).each do |target| - (target[:values] || []).each do |value| - if value == user[:key] - return get_variation(flag, target[:variation], { kind: 'TARGET_MATCH' }, logger) - end - end - end - - # Check custom rules - rules = flag[:rules] || [] - rules.each_index do |i| - rule = rules[i] - if rule_match_user(rule, user, store) - return get_value_for_variation_or_rollout(flag, rule, user, - { kind: 'RULE_MATCH', ruleIndex: i, ruleId: rule[:id] }, logger) - end - end - - # Check the fallthrough rule - if !flag[:fallthrough].nil? - return get_value_for_variation_or_rollout(flag, flag[:fallthrough], user, - { kind: 'FALLTHROUGH' }, logger) - end - - return EvaluationDetail.new(nil, nil, { kind: 'FALLTHROUGH' }) - end - - def check_prerequisites(flag, user, store, events, logger, event_factory) - (flag[:prerequisites] || []).each do |prerequisite| - prereq_ok = true - prereq_key = prerequisite[:key] - prereq_flag = store.get(FEATURES, prereq_key) - - if prereq_flag.nil? - logger.error { "[LDClient] Could not retrieve prerequisite flag \"#{prereq_key}\" when evaluating \"#{flag[:key]}\"" } - prereq_ok = false - else - begin - prereq_res = eval_internal(prereq_flag, user, store, events, logger, event_factory) - # Note that if the prerequisite flag is off, we don't consider it a match no matter what its - # off variation was. But we still need to evaluate it in order to generate an event. - if !prereq_flag[:on] || prereq_res.variation_index != prerequisite[:variation] - prereq_ok = false - end - event = event_factory.new_eval_event(prereq_flag, user, prereq_res, nil, flag) - events.push(event) - rescue => exn - Util.log_exception(logger, "Error evaluating prerequisite flag \"#{prereq_key}\" for flag \"#{flag[:key]}\"", exn) - prereq_ok = false - end - end - if !prereq_ok - return { kind: 'PREREQUISITE_FAILED', prerequisiteKey: prereq_key } - end - end - nil - end - - def rule_match_user(rule, user, store) - return false if !rule[:clauses] - - (rule[:clauses] || []).each do |clause| - return false if !clause_match_user(clause, user, store) - end - - return true - end - - def clause_match_user(clause, user, store) - # In the case of a segment match operator, we check if the user is in any of the segments, - # and possibly negate - if clause[:op].to_sym == :segmentMatch - (clause[:values] || []).each do |v| - segment = store.get(SEGMENTS, v) - return maybe_negate(clause, true) if !segment.nil? && segment_match_user(segment, user) - end - return maybe_negate(clause, false) - end - clause_match_user_no_segments(clause, user) - end - - def clause_match_user_no_segments(clause, user) - val = user_value(user, clause[:attribute]) - return false if val.nil? - - op = OPERATORS[clause[:op].to_sym] - if op.nil? - return false - end - - if val.is_a? 
Enumerable - val.each do |v| - return maybe_negate(clause, true) if match_any(op, v, clause[:values]) - end - return maybe_negate(clause, false) - end - - maybe_negate(clause, match_any(op, val, clause[:values])) - end - - def variation_index_for_user(flag, rule, user) - if !rule[:variation].nil? # fixed variation - return rule[:variation] - elsif !rule[:rollout].nil? # percentage rollout - rollout = rule[:rollout] - bucket_by = rollout[:bucketBy].nil? ? "key" : rollout[:bucketBy] - bucket = bucket_user(user, flag[:key], bucket_by, flag[:salt]) - sum = 0; - rollout[:variations].each do |variate| - sum += variate[:weight].to_f / 100000.0 - if bucket < sum - return variate[:variation] - end - end - nil - else # the rule isn't well-formed - nil - end - end - - def segment_match_user(segment, user) - return false unless user[:key] - - return true if segment[:included].include?(user[:key]) - return false if segment[:excluded].include?(user[:key]) - - (segment[:rules] || []).each do |r| - return true if segment_rule_match_user(r, user, segment[:key], segment[:salt]) - end - - return false - end - - def segment_rule_match_user(rule, user, segment_key, salt) - (rule[:clauses] || []).each do |c| - return false unless clause_match_user_no_segments(c, user) - end - - # If the weight is absent, this rule matches - return true if !rule[:weight] - - # All of the clauses are met. See if the user buckets in - bucket = bucket_user(user, segment_key, rule[:bucketBy].nil? ? "key" : rule[:bucketBy], salt) - weight = rule[:weight].to_f / 100000.0 - return bucket < weight - end - - def bucket_user(user, key, bucket_by, salt) - return nil unless user[:key] - - id_hash = bucketable_string_value(user_value(user, bucket_by)) - if id_hash.nil? - return 0.0 - end - - if user[:secondary] - id_hash += "." + user[:secondary] - end - - hash_key = "%s.%s.%s" % [key, salt, id_hash] - - hash_val = (Digest::SHA1.hexdigest(hash_key))[0..14] - hash_val.to_i(16) / Float(0xFFFFFFFFFFFFFFF) - end - - def bucketable_string_value(value) - return value if value.is_a? String - return value.to_s if value.is_a? Integer - nil - end - - def user_value(user, attribute) - attribute = attribute.to_sym - - if BUILTINS.include? attribute - user[attribute] - elsif !user[:custom].nil? - user[:custom][attribute] - else - nil - end - end - - def maybe_negate(clause, b) - clause[:negate] ? !b : b - end - - def match_any(op, value, values) - values.each do |v| - return true if op.call(value, v) - end - return false - end - - private - - def get_variation(flag, index, reason, logger) - if index < 0 || index >= flag[:variations].length - logger.error("[LDClient] Data inconsistency in feature flag \"#{flag[:key]}\": invalid variation index") - return error_result('MALFORMED_FLAG') - end - EvaluationDetail.new(flag[:variations][index], index, reason) - end - - def get_off_value(flag, reason, logger) - if flag[:offVariation].nil? # off variation unspecified - return default value - return EvaluationDetail.new(nil, nil, reason) - end - get_variation(flag, flag[:offVariation], reason, logger) - end - - def get_value_for_variation_or_rollout(flag, vr, user, reason, logger) - index = variation_index_for_user(flag, vr, user) - if index.nil? 
- logger.error("[LDClient] Data inconsistency in feature flag \"#{flag[:key]}\": variation/rollout object with no variation or rollout") - return error_result('MALFORMED_FLAG') - end - return get_variation(flag, index, reason, logger) - end - end -end diff --git a/lib/ldclient-rb/evaluation_detail.rb b/lib/ldclient-rb/evaluation_detail.rb new file mode 100644 index 00000000..9db9f0fe --- /dev/null +++ b/lib/ldclient-rb/evaluation_detail.rb @@ -0,0 +1,80 @@ + +module LaunchDarkly +# An object returned by {LDClient#variation_detail}, combining the result of a flag evaluation with + # an explanation of how it was calculated. + class EvaluationDetail + def initialize(value, variation_index, reason) + @value = value + @variation_index = variation_index + @reason = reason + end + + # + # The result of the flag evaluation. This will be either one of the flag's variations, or the + # default value that was passed to {LDClient#variation_detail}. It is the same as the return + # value of {LDClient#variation}. + # + # @return [Object] + # + attr_reader :value + + # + # The index of the returned value within the flag's list of variations. The first variation is + # 0, the second is 1, etc. This is `nil` if the default value was returned. + # + # @return [int|nil] + # + attr_reader :variation_index + + # + # An object describing the main factor that influenced the flag evaluation value. + # + # This object is currently represented as a Hash, which may have the following keys: + # + # `:kind`: The general category of reason. Possible values: + # + # * `'OFF'`: the flag was off and therefore returned its configured off value + # * `'FALLTHROUGH'`: the flag was on but the user did not match any targets or rules + # * `'TARGET_MATCH'`: the user key was specifically targeted for this flag + # * `'RULE_MATCH'`: the user matched one of the flag's rules + # * `'PREREQUISITE_FAILED`': the flag was considered off because it had at least one + # prerequisite flag that either was off or did not return the desired variation + # * `'ERROR'`: the flag could not be evaluated, so the default value was returned + # + # `:ruleIndex`: If the kind was `RULE_MATCH`, this is the positional index of the + # matched rule (0 for the first rule). + # + # `:ruleId`: If the kind was `RULE_MATCH`, this is the rule's unique identifier. + # + # `:prerequisiteKey`: If the kind was `PREREQUISITE_FAILED`, this is the flag key of + # the prerequisite flag that failed. + # + # `:errorKind`: If the kind was `ERROR`, this indicates the type of error: + # + # * `'CLIENT_NOT_READY'`: the caller tried to evaluate a flag before the client had + # successfully initialized + # * `'FLAG_NOT_FOUND'`: the caller provided a flag key that did not match any known flag + # * `'MALFORMED_FLAG'`: there was an internal inconsistency in the flag data, e.g. a + # rule specified a nonexistent variation + # * `'USER_NOT_SPECIFIED'`: the user object or user key was not provied + # * `'EXCEPTION'`: an unexpected exception stopped flag evaluation + # + # @return [Hash] + # + attr_reader :reason + + # + # Tests whether the flag evaluation returned a default value. This is the same as checking + # whether {#variation_index} is nil. + # + # @return [Boolean] + # + def default_value? + variation_index.nil? 
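      # NOTE (editorial sketch, not part of this patch): typical application-side use of
      # EvaluationDetail, assuming variation_detail takes the same (key, user, default)
      # arguments as variation:
      #
      #   detail = client.variation_detail("my.flag.key", { key: "userkey" }, false)
      #   detail.value            # one of the flag's variations, or the default on error
      #   detail.variation_index  # nil whenever the default value was returned
      #   detail.reason[:kind]    # e.g. 'OFF', 'RULE_MATCH', 'PREREQUISITE_FAILED', 'ERROR'
      #   detail.default_value?   # equivalent to detail.variation_index.nil?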
+ end + + def ==(other) + @value == other.value && @variation_index == other.variation_index && @reason == other.reason + end + end +end diff --git a/lib/ldclient-rb/impl/evaluator.rb b/lib/ldclient-rb/impl/evaluator.rb new file mode 100644 index 00000000..e84e369b --- /dev/null +++ b/lib/ldclient-rb/impl/evaluator.rb @@ -0,0 +1,223 @@ +require "ldclient-rb/evaluation_detail" +require "ldclient-rb/impl/evaluator_bucketing" +require "ldclient-rb/impl/evaluator_operators" + +module LaunchDarkly + module Impl + class Evaluator + def initialize(get_flag, get_segment, logger) + @get_flag = get_flag + @get_segment = get_segment + @logger = logger + end + + # Used internally to hold an evaluation result and the events that were generated from prerequisites. + EvalResult = Struct.new(:detail, :events) + + def self.error_result(errorKind, value = nil) + EvaluationDetail.new(value, nil, { kind: 'ERROR', errorKind: errorKind }) + end + + # Evaluates a feature flag and returns an EvalResult. The result.value will be nil if the flag returns + # the default value. Error conditions produce a result with an error reason, not an exception. + def evaluate(flag, user, event_factory) + if user.nil? || user[:key].nil? + return EvalResult.new(Evaluator.error_result('USER_NOT_SPECIFIED'), []) + end + + # If the flag doesn't have any prerequisites (which most flags don't) then it cannot generate any feature + # request events for prerequisites and we can skip allocating an array. + if flag[:prerequisites] && !flag[:prerequisites].empty? + events = [] + else + events = nil + end + + detail = eval_internal(flag, user, events, event_factory) + return EvalResult.new(detail, events.nil? || events.empty? ? nil : events) + end + + private + + def eval_internal(flag, user, events, event_factory) + if !flag[:on] + return get_off_value(flag, { kind: 'OFF' }) + end + + prereq_failure_reason = check_prerequisites(flag, user, events, event_factory) + if !prereq_failure_reason.nil? + return get_off_value(flag, prereq_failure_reason) + end + + # Check user target matches + (flag[:targets] || []).each do |target| + (target[:values] || []).each do |value| + if value == user[:key] + return get_variation(flag, target[:variation], { kind: 'TARGET_MATCH' }) + end + end + end + + # Check custom rules + rules = flag[:rules] || [] + rules.each_index do |i| + rule = rules[i] + if rule_match_user(rule, user) + return get_value_for_variation_or_rollout(flag, rule, user, + { kind: 'RULE_MATCH', ruleIndex: i, ruleId: rule[:id] }) + end + end + + # Check the fallthrough rule + if !flag[:fallthrough].nil? + return get_value_for_variation_or_rollout(flag, flag[:fallthrough], user, + { kind: 'FALLTHROUGH' }) + end + + return EvaluationDetail.new(nil, nil, { kind: 'FALLTHROUGH' }) + end + + def check_prerequisites(flag, user, events, event_factory) + (flag[:prerequisites] || []).each do |prerequisite| + prereq_ok = true + prereq_key = prerequisite[:key] + prereq_flag = @get_flag.call(prereq_key) + + if prereq_flag.nil? + @logger.error { "[LDClient] Could not retrieve prerequisite flag \"#{prereq_key}\" when evaluating \"#{flag[:key]}\"" } + prereq_ok = false + else + begin + prereq_res = eval_internal(prereq_flag, user, events, event_factory) + # Note that if the prerequisite flag is off, we don't consider it a match no matter what its + # off variation was. But we still need to evaluate it in order to generate an event. 
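            # NOTE (editorial sketch, not part of this patch): this off-prerequisite rule can be
            # seen end to end with the new Evaluator. The flag hashes below are hypothetical; the
            # constructor and EventFactory calls mirror spec/impl/evaluator_spec.rb later in this
            # commit (assuming `require "ldclient-rb"` and `require "logger"`):
            #
            #   prereq = { key: "prereq", on: false, offVariation: 1, variations: ["d", "e"], version: 2 }
            #   flag   = { key: "flag", on: true, prerequisites: [{ key: "prereq", variation: 1 }],
            #              offVariation: 0, fallthrough: { variation: 1 }, variations: ["off", "on"], version: 1 }
            #   e = LaunchDarkly::Impl::Evaluator.new(lambda { |k| prereq }, lambda { |k| nil }, Logger.new($stdout))
            #   result = e.evaluate(flag, { key: "userkey" }, LaunchDarkly::Impl::EventFactory.new(false))
            #   result.detail.reason  # => { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'prereq' }
            #   result.events.length  # => 1 (a feature event for "prereq" is still generated)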
+ if !prereq_flag[:on] || prereq_res.variation_index != prerequisite[:variation] + prereq_ok = false + end + event = event_factory.new_eval_event(prereq_flag, user, prereq_res, nil, flag) + events.push(event) + rescue => exn + Util.log_exception(@logger, "Error evaluating prerequisite flag \"#{prereq_key}\" for flag \"#{flag[:key]}\"", exn) + prereq_ok = false + end + end + if !prereq_ok + return { kind: 'PREREQUISITE_FAILED', prerequisiteKey: prereq_key } + end + end + nil + end + + def rule_match_user(rule, user) + return false if !rule[:clauses] + + (rule[:clauses] || []).each do |clause| + return false if !clause_match_user(clause, user) + end + + return true + end + + def clause_match_user(clause, user) + # In the case of a segment match operator, we check if the user is in any of the segments, + # and possibly negate + if clause[:op].to_sym == :segmentMatch + result = (clause[:values] || []).any? { |v| + segment = @get_segment.call(v) + !segment.nil? && segment_match_user(segment, user) + } + clause[:negate] ? !result : result + else + clause_match_user_no_segments(clause, user) + end + end + + def clause_match_user_no_segments(clause, user) + user_val = EvaluatorOperators.user_value(user, clause[:attribute]) + return false if user_val.nil? + + op = clause[:op].to_sym + clause_vals = clause[:values] + result = if user_val.is_a? Enumerable + user_val.any? { |uv| clause_vals.any? { |cv| EvaluatorOperators.apply(op, uv, cv) } } + else + clause_vals.any? { |cv| EvaluatorOperators.apply(op, user_val, cv) } + end + clause[:negate] ? !result : result + end + + def variation_index_for_user(flag, rule, user) + if !rule[:variation].nil? # fixed variation + return rule[:variation] + elsif !rule[:rollout].nil? # percentage rollout + rollout = rule[:rollout] + bucket_by = rollout[:bucketBy].nil? ? "key" : rollout[:bucketBy] + bucket = EvaluatorBucketing.bucket_user(user, flag[:key], bucket_by, flag[:salt]) + sum = 0; + rollout[:variations].each do |variate| + sum += variate[:weight].to_f / 100000.0 + if bucket < sum + return variate[:variation] + end + end + nil + else # the rule isn't well-formed + nil + end + end + + def segment_match_user(segment, user) + return false unless user[:key] + + return true if segment[:included].include?(user[:key]) + return false if segment[:excluded].include?(user[:key]) + + (segment[:rules] || []).each do |r| + return true if segment_rule_match_user(r, user, segment[:key], segment[:salt]) + end + + return false + end + + def segment_rule_match_user(rule, user, segment_key, salt) + (rule[:clauses] || []).each do |c| + return false unless clause_match_user_no_segments(c, user) + end + + # If the weight is absent, this rule matches + return true if !rule[:weight] + + # All of the clauses are met. See if the user buckets in + bucket = EvaluatorBucketing.bucket_user(user, segment_key, rule[:bucketBy].nil? ? "key" : rule[:bucketBy], salt) + weight = rule[:weight].to_f / 100000.0 + return bucket < weight + end + + private + + def get_variation(flag, index, reason) + if index < 0 || index >= flag[:variations].length + @logger.error("[LDClient] Data inconsistency in feature flag \"#{flag[:key]}\": invalid variation index") + return Evaluator.error_result('MALFORMED_FLAG') + end + EvaluationDetail.new(flag[:variations][index], index, reason) + end + + def get_off_value(flag, reason) + if flag[:offVariation].nil? 
# off variation unspecified - return default value + return EvaluationDetail.new(nil, nil, reason) + end + get_variation(flag, flag[:offVariation], reason) + end + + def get_value_for_variation_or_rollout(flag, vr, user, reason) + index = variation_index_for_user(flag, vr, user) + if index.nil? + @logger.error("[LDClient] Data inconsistency in feature flag \"#{flag[:key]}\": variation/rollout object with no variation or rollout") + return Evaluator.error_result('MALFORMED_FLAG') + end + return get_variation(flag, index, reason) + end + end + end +end diff --git a/lib/ldclient-rb/impl/evaluator_bucketing.rb b/lib/ldclient-rb/impl/evaluator_bucketing.rb new file mode 100644 index 00000000..273ec1e6 --- /dev/null +++ b/lib/ldclient-rb/impl/evaluator_bucketing.rb @@ -0,0 +1,32 @@ + +module LaunchDarkly + module Impl + module EvaluatorBucketing + def self.bucket_user(user, key, bucket_by, salt) + return nil unless user[:key] + + id_hash = bucketable_string_value(EvaluatorOperators.user_value(user, bucket_by)) + if id_hash.nil? + return 0.0 + end + + if user[:secondary] + id_hash += "." + user[:secondary].to_s + end + + hash_key = "%s.%s.%s" % [key, salt, id_hash] + + hash_val = (Digest::SHA1.hexdigest(hash_key))[0..14] + hash_val.to_i(16) / Float(0xFFFFFFFFFFFFFFF) + end + + private + + def self.bucketable_string_value(value) + return value if value.is_a? String + return value.to_s if value.is_a? Integer + nil + end + end + end +end diff --git a/lib/ldclient-rb/impl/evaluator_operators.rb b/lib/ldclient-rb/impl/evaluator_operators.rb new file mode 100644 index 00000000..2bc8643b --- /dev/null +++ b/lib/ldclient-rb/impl/evaluator_operators.rb @@ -0,0 +1,128 @@ +require "date" +require "semantic" + +module LaunchDarkly + module Impl + module EvaluatorOperators + def self.apply(op, user_value, clause_value) + case op + when :in + user_value == clause_value + when :startsWith + string_op(user_value, clause_value, lambda { |a, b| a.start_with? b }) + when :endsWith + string_op(user_value, clause_value, lambda { |a, b| a.end_with? b }) + when :contains + string_op(user_value, clause_value, lambda { |a, b| a.include? b }) + when :matches + string_op(user_value, clause_value, lambda { |a, b| !(Regexp.new b).match(a).nil? }) + when :lessThan + numeric_op(user_value, clause_value, lambda { |a, b| a < b }) + when :lessThanOrEqual + numeric_op(user_value, clause_value, lambda { |a, b| a <= b }) + when :greaterThan + numeric_op(user_value, clause_value, lambda { |a, b| a > b }) + when :greaterThanOrEqual + numeric_op(user_value, clause_value, lambda { |a, b| a >= b }) + when :before + date_op(user_value, clause_value, lambda { |a, b| a < b }) + when :after + date_op(user_value, clause_value, lambda { |a, b| a > b }) + when :semVerEqual + semver_op(user_value, clause_value, lambda { |a, b| a == b }) + when :semVerLessThan + semver_op(user_value, clause_value, lambda { |a, b| a < b }) + when :semVerGreaterThan + semver_op(user_value, clause_value, lambda { |a, b| a > b }) + when :segmentMatch + false # we should never reach this - instead we special-case this operator in clause_match_user + else + false + end + end + + def self.user_value(user, attribute) + attribute = attribute.to_sym + if BUILTINS.include? attribute + value = user[attribute] + return value.to_s if !value.nil? && !(value.is_a? String) + value + elsif !user[:custom].nil? 
+ user[:custom][attribute] + else + nil + end + end + + private + + BUILTINS = [:key, :ip, :country, :email, :firstName, :lastName, :avatar, :name, :anonymous] + NUMERIC_VERSION_COMPONENTS_REGEX = Regexp.new("^[0-9.]*") + + private_constant :BUILTINS + private_constant :NUMERIC_VERSION_COMPONENTS_REGEX + + def self.string_op(user_value, clause_value, fn) + (user_value.is_a? String) && (clause_value.is_a? String) && fn.call(user_value, clause_value) + end + + def self.numeric_op(user_value, clause_value, fn) + (user_value.is_a? Numeric) && (clause_value.is_a? Numeric) && fn.call(user_value, clause_value) + end + + def self.date_op(user_value, clause_value, fn) + ud = to_date(user_value) + if !ud.nil? + cd = to_date(clause_value) + !cd.nil? && fn.call(ud, cd) + else + false + end + end + + def self.semver_op(user_value, clause_value, fn) + uv = to_semver(user_value) + if !uv.nil? + cv = to_semver(clause_value) + !cv.nil? && fn.call(uv, cv) + else + false + end + end + + def self.to_date(value) + if value.is_a? String + begin + DateTime.rfc3339(value).strftime("%Q").to_i + rescue => e + nil + end + elsif value.is_a? Numeric + value + else + nil + end + end + + def self.to_semver(value) + if value.is_a? String + for _ in 0..2 do + begin + return Semantic::Version.new(value) + rescue ArgumentError + value = add_zero_version_component(value) + end + end + end + nil + end + + def self.add_zero_version_component(v) + NUMERIC_VERSION_COMPONENTS_REGEX.match(v) { |m| + m[0] + ".0" + v[m[0].length..-1] + } + end + + end + end +end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index b7c2ee85..8b22feca 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -1,3 +1,4 @@ +require "ldclient-rb/impl/evaluator" require "ldclient-rb/impl/event_factory" require "ldclient-rb/impl/store_client_wrapper" require "concurrent/atomics" @@ -13,7 +14,6 @@ module LaunchDarkly # should create a single client instance for the lifetime of the application. # class LDClient - include Evaluation include Impl # # Creates a new client instance that connects to LaunchDarkly. A custom @@ -46,6 +46,10 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) updated_config.instance_variable_set(:@feature_store, @store) @config = updated_config + get_flag = lambda { |key| @store.get(FEATURES, key) } + get_segment = lambda { |key| @store.get(SEGMENTS, key) } + @evaluator = LaunchDarkly::Impl::Evaluator.new(get_flag, get_segment, @config.logger) + if @config.offline? || !@config.send_events @event_processor = NullEventProcessor.new else @@ -310,7 +314,7 @@ def all_flags_state(user, options={}) next end begin - result = evaluate(f, user, @store, @config.logger, @event_factory_default) + result = @evaluator.evaluate(f, user, @event_factory_default) state.add_flag(f, result.detail.value, result.detail.variation_index, with_reasons ? result.detail.reason : nil, details_only_if_tracked) rescue => exn @@ -352,7 +356,7 @@ def create_default_data_source(sdk_key, config) # @return [EvaluationDetail] def evaluate_internal(key, user, default, event_factory) if @config.offline? - return error_result('CLIENT_NOT_READY', default) + return Evaluator.error_result('CLIENT_NOT_READY', default) end if !initialized? 
@@ -360,7 +364,7 @@ def evaluate_internal(key, user, default, event_factory) @config.logger.warn { "[LDClient] Client has not finished initializing; using last known values from feature store" } else @config.logger.error { "[LDClient] Client has not finished initializing; feature store unavailable, returning default value" } - detail = error_result('CLIENT_NOT_READY', default) + detail = Evaluator.error_result('CLIENT_NOT_READY', default) @event_processor.add_event(event_factory.new_unknown_flag_event(key, user, default, detail.reason)) return detail end @@ -370,20 +374,20 @@ def evaluate_internal(key, user, default, event_factory) if feature.nil? @config.logger.info { "[LDClient] Unknown feature flag \"#{key}\". Returning default value" } - detail = error_result('FLAG_NOT_FOUND', default) + detail = Evaluator.error_result('FLAG_NOT_FOUND', default) @event_processor.add_event(event_factory.new_unknown_flag_event(key, user, default, detail.reason)) return detail end unless user @config.logger.error { "[LDClient] Must specify user" } - detail = error_result('USER_NOT_SPECIFIED', default) + detail = Evaluator.error_result('USER_NOT_SPECIFIED', default) @event_processor.add_event(event_factory.new_default_event(feature, user, default, detail.reason)) return detail end begin - res = evaluate(feature, user, @store, @config.logger, event_factory) + res = @evaluator.evaluate(feature, user, event_factory) if !res.events.nil? res.events.each do |event| @event_processor.add_event(event) @@ -397,7 +401,7 @@ def evaluate_internal(key, user, default, event_factory) return detail rescue => exn Util.log_exception(@config.logger, "Error evaluating feature flag \"#{key}\"", exn) - detail = error_result('EXCEPTION', default) + detail = Evaluator.error_result('EXCEPTION', default) @event_processor.add_event(event_factory.new_default_event(feature, user, default, detail.reason)) return detail end diff --git a/spec/evaluation_spec.rb b/spec/evaluation_spec.rb deleted file mode 100644 index ff4b63f6..00000000 --- a/spec/evaluation_spec.rb +++ /dev/null @@ -1,737 +0,0 @@ -require "spec_helper" - -describe LaunchDarkly::Evaluation do - subject { LaunchDarkly::Evaluation } - - include LaunchDarkly::Evaluation - - let(:features) { LaunchDarkly::InMemoryFeatureStore.new } - - let(:factory) { LaunchDarkly::Impl::EventFactory.new(false) } - - let(:user) { - { - key: "userkey", - email: "test@example.com", - name: "Bob" - } - } - - let(:logger) { LaunchDarkly::Config.default_logger } - - def boolean_flag_with_rules(rules) - { key: 'feature', on: true, rules: rules, fallthrough: { variation: 0 }, variations: [ false, true ] } - end - - def boolean_flag_with_clauses(clauses) - boolean_flag_with_rules([{ id: 'ruleid', clauses: clauses, variation: 1 }]) - end - - describe "evaluate" do - it "returns off variation if flag is off" do - flag = { - key: 'feature', - on: false, - offVariation: 1, - fallthrough: { variation: 0 }, - variations: ['a', 'b', 'c'] - } - user = { key: 'x' } - detail = LaunchDarkly::EvaluationDetail.new('b', 1, { kind: 'OFF' }) - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq([]) - end - - it "returns nil if flag is off and off variation is unspecified" do - flag = { - key: 'feature', - on: false, - fallthrough: { variation: 0 }, - variations: ['a', 'b', 'c'] - } - user = { key: 'x' } - detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'OFF' }) - result = evaluate(flag, user, features, logger, factory) - 
expect(result.detail).to eq(detail) - expect(result.events).to eq([]) - end - - it "returns an error if off variation is too high" do - flag = { - key: 'feature', - on: false, - offVariation: 999, - fallthrough: { variation: 0 }, - variations: ['a', 'b', 'c'] - } - user = { key: 'x' } - detail = LaunchDarkly::EvaluationDetail.new(nil, nil, - { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq([]) - end - - it "returns an error if off variation is negative" do - flag = { - key: 'feature', - on: false, - offVariation: -1, - fallthrough: { variation: 0 }, - variations: ['a', 'b', 'c'] - } - user = { key: 'x' } - detail = LaunchDarkly::EvaluationDetail.new(nil, nil, - { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq([]) - end - - it "returns off variation if prerequisite is not found" do - flag = { - key: 'feature0', - on: true, - prerequisites: [{key: 'badfeature', variation: 1}], - fallthrough: { variation: 0 }, - offVariation: 1, - variations: ['a', 'b', 'c'] - } - user = { key: 'x' } - detail = LaunchDarkly::EvaluationDetail.new('b', 1, - { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'badfeature' }) - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq([]) - end - - it "returns off variation and event if prerequisite of a prerequisite is not found" do - flag = { - key: 'feature0', - on: true, - prerequisites: [{key: 'feature1', variation: 1}], - fallthrough: { variation: 0 }, - offVariation: 1, - variations: ['a', 'b', 'c'], - version: 1 - } - flag1 = { - key: 'feature1', - on: true, - prerequisites: [{key: 'feature2', variation: 1}], # feature2 doesn't exist - fallthrough: { variation: 0 }, - variations: ['d', 'e'], - version: 2 - } - features.upsert(LaunchDarkly::FEATURES, flag1) - user = { key: 'x' } - detail = LaunchDarkly::EvaluationDetail.new('b', 1, - { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) - events_should_be = [{ - kind: 'feature', key: 'feature1', user: user, value: nil, default: nil, variation: nil, version: 2, prereqOf: 'feature0' - }] - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq(events_should_be) - end - - it "returns off variation and event if prerequisite is off" do - flag = { - key: 'feature0', - on: true, - prerequisites: [{key: 'feature1', variation: 1}], - fallthrough: { variation: 0 }, - offVariation: 1, - variations: ['a', 'b', 'c'], - version: 1 - } - flag1 = { - key: 'feature1', - on: false, - # note that even though it returns the desired variation, it is still off and therefore not a match - offVariation: 1, - fallthrough: { variation: 0 }, - variations: ['d', 'e'], - version: 2 - } - features.upsert(LaunchDarkly::FEATURES, flag1) - user = { key: 'x' } - detail = LaunchDarkly::EvaluationDetail.new('b', 1, - { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) - events_should_be = [{ - kind: 'feature', key: 'feature1', user: user, variation: 1, value: 'e', default: nil, version: 2, prereqOf: 'feature0' - }] - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq(events_should_be) - end - - it "returns off variation and event if prerequisite is not met" do - flag = { - 
key: 'feature0', - on: true, - prerequisites: [{key: 'feature1', variation: 1}], - fallthrough: { variation: 0 }, - offVariation: 1, - variations: ['a', 'b', 'c'], - version: 1 - } - flag1 = { - key: 'feature1', - on: true, - fallthrough: { variation: 0 }, - variations: ['d', 'e'], - version: 2 - } - features.upsert(LaunchDarkly::FEATURES, flag1) - user = { key: 'x' } - detail = LaunchDarkly::EvaluationDetail.new('b', 1, - { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) - events_should_be = [{ - kind: 'feature', key: 'feature1', user: user, variation: 0, value: 'd', default: nil, version: 2, prereqOf: 'feature0' - }] - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq(events_should_be) - end - - it "returns fallthrough variation and event if prerequisite is met and there are no rules" do - flag = { - key: 'feature0', - on: true, - prerequisites: [{key: 'feature1', variation: 1}], - fallthrough: { variation: 0 }, - offVariation: 1, - variations: ['a', 'b', 'c'], - version: 1 - } - flag1 = { - key: 'feature1', - on: true, - fallthrough: { variation: 1 }, - variations: ['d', 'e'], - version: 2 - } - features.upsert(LaunchDarkly::FEATURES, flag1) - user = { key: 'x' } - detail = LaunchDarkly::EvaluationDetail.new('a', 0, { kind: 'FALLTHROUGH' }) - events_should_be = [{ - kind: 'feature', key: 'feature1', user: user, variation: 1, value: 'e', default: nil, version: 2, prereqOf: 'feature0' - }] - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq(events_should_be) - end - - it "returns an error if fallthrough variation is too high" do - flag = { - key: 'feature', - on: true, - fallthrough: { variation: 999 }, - offVariation: 1, - variations: ['a', 'b', 'c'] - } - user = { key: 'userkey' } - detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq([]) - end - - it "returns an error if fallthrough variation is negative" do - flag = { - key: 'feature', - on: true, - fallthrough: { variation: -1 }, - offVariation: 1, - variations: ['a', 'b', 'c'] - } - user = { key: 'userkey' } - detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq([]) - end - - it "returns an error if fallthrough has no variation or rollout" do - flag = { - key: 'feature', - on: true, - fallthrough: { }, - offVariation: 1, - variations: ['a', 'b', 'c'] - } - user = { key: 'userkey' } - detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq([]) - end - - it "returns an error if fallthrough has a rollout with no variations" do - flag = { - key: 'feature', - on: true, - fallthrough: { rollout: { variations: [] } }, - offVariation: 1, - variations: ['a', 'b', 'c'] - } - user = { key: 'userkey' } - detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq([]) - end - - it "matches user from 
targets" do - flag = { - key: 'feature', - on: true, - targets: [ - { values: [ 'whoever', 'userkey' ], variation: 2 } - ], - fallthrough: { variation: 0 }, - offVariation: 1, - variations: ['a', 'b', 'c'] - } - user = { key: 'userkey' } - detail = LaunchDarkly::EvaluationDetail.new('c', 2, { kind: 'TARGET_MATCH' }) - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq([]) - end - - it "matches user from rules" do - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 1 } - flag = boolean_flag_with_rules([rule]) - user = { key: 'userkey' } - detail = LaunchDarkly::EvaluationDetail.new(true, 1, - { kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'ruleid' }) - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq([]) - end - - it "returns an error if rule variation is too high" do - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 999 } - flag = boolean_flag_with_rules([rule]) - user = { key: 'userkey' } - detail = LaunchDarkly::EvaluationDetail.new(nil, nil, - { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq([]) - end - - it "returns an error if rule variation is negative" do - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: -1 } - flag = boolean_flag_with_rules([rule]) - user = { key: 'userkey' } - detail = LaunchDarkly::EvaluationDetail.new(nil, nil, - { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq([]) - end - - it "returns an error if rule has neither variation nor rollout" do - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }] } - flag = boolean_flag_with_rules([rule]) - user = { key: 'userkey' } - detail = LaunchDarkly::EvaluationDetail.new(nil, nil, - { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq([]) - end - - it "returns an error if rule has a rollout with no variations" do - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], - rollout: { variations: [] } } - flag = boolean_flag_with_rules([rule]) - user = { key: 'userkey' } - detail = LaunchDarkly::EvaluationDetail.new(nil, nil, - { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq([]) - end - - it "coerces user key to a string for evaluation" do - clause = { attribute: 'key', op: 'in', values: ['999'] } - flag = boolean_flag_with_clauses([clause]) - user = { key: 999 } - result = evaluate(flag, user, features, logger, factory) - expect(result.detail.value).to eq(true) - end - - it "coerces secondary key to a string for evaluation" do - # We can't really verify that the rollout calculation works correctly, but we can at least - # make sure it doesn't error out if there's a non-string secondary value (ch35189) - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], - rollout: { salt: '', variations: [ { weight: 100000, variation: 1 } ] } } - flag = 
boolean_flag_with_rules([rule]) - user = { key: "userkey", secondary: 999 } - result = evaluate(flag, user, features, logger, factory) - expect(result.detail.reason).to eq({ kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'ruleid'}) - end - end - - describe "clause" do - it "can match built-in attribute" do - user = { key: 'x', name: 'Bob' } - clause = { attribute: 'name', op: 'in', values: ['Bob'] } - flag = boolean_flag_with_clauses([clause]) - expect(evaluate(flag, user, features, logger, factory).detail.value).to be true - end - - it "can match custom attribute" do - user = { key: 'x', name: 'Bob', custom: { legs: 4 } } - clause = { attribute: 'legs', op: 'in', values: [4] } - flag = boolean_flag_with_clauses([clause]) - expect(evaluate(flag, user, features, logger, factory).detail.value).to be true - end - - it "returns false for missing attribute" do - user = { key: 'x', name: 'Bob' } - clause = { attribute: 'legs', op: 'in', values: [4] } - flag = boolean_flag_with_clauses([clause]) - expect(evaluate(flag, user, features, logger, factory).detail.value).to be false - end - - it "returns false for unknown operator" do - user = { key: 'x', name: 'Bob' } - clause = { attribute: 'name', op: 'unknown', values: [4] } - flag = boolean_flag_with_clauses([clause]) - expect(evaluate(flag, user, features, logger, factory).detail.value).to be false - end - - it "does not stop evaluating rules after clause with unknown operator" do - user = { key: 'x', name: 'Bob' } - clause0 = { attribute: 'name', op: 'unknown', values: [4] } - rule0 = { clauses: [ clause0 ], variation: 1 } - clause1 = { attribute: 'name', op: 'in', values: ['Bob'] } - rule1 = { clauses: [ clause1 ], variation: 1 } - flag = boolean_flag_with_rules([rule0, rule1]) - expect(evaluate(flag, user, features, logger, factory).detail.value).to be true - end - - it "can be negated" do - user = { key: 'x', name: 'Bob' } - clause = { attribute: 'name', op: 'in', values: ['Bob'], negate: true } - flag = boolean_flag_with_clauses([clause]) - expect(evaluate(flag, user, features, logger, factory).detail.value).to be false - end - - it "retrieves segment from segment store for segmentMatch operator" do - segment = { - key: 'segkey', - included: [ 'userkey' ], - version: 1, - deleted: false - } - features.upsert(LaunchDarkly::SEGMENTS, segment) - - user = { key: 'userkey' } - clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } - flag = boolean_flag_with_clauses([clause]) - expect(evaluate(flag, user, features, logger, factory).detail.value).to be true - end - - it "falls through with no errors if referenced segment is not found" do - user = { key: 'userkey' } - clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } - flag = boolean_flag_with_clauses([clause]) - expect(evaluate(flag, user, features, logger, factory).detail.value).to be false - end - - it "can be negated" do - user = { key: 'x', name: 'Bob' } - clause = { attribute: 'name', op: 'in', values: ['Bob'] } - flag = boolean_flag_with_clauses([clause]) - expect { - clause[:negate] = true - }.to change {evaluate(flag, user, features, logger, factory).detail.value}.from(true).to(false) - end - end - - describe "operators" do - dateStr1 = "2017-12-06T00:00:00.000-07:00" - dateStr2 = "2017-12-06T00:01:01.000-07:00" - dateMs1 = 10000000 - dateMs2 = 10000001 - invalidDate = "hey what's this?" 
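    # NOTE (editorial sketch, not part of this patch): the operator table deleted below is
    # moved, essentially unchanged, to spec/impl/evaluator_operators_spec.rb, where it
    # exercises the new EvaluatorOperators.apply directly instead of going through a full
    # flag evaluation. A few concrete calls (assuming the SDK is loaded via `require "ldclient-rb"`):
    ops = LaunchDarkly::Impl::EvaluatorOperators
    ops.apply(:semVerEqual, "2.0", "2.0.0")     # => true; "2.0" is padded to "2.0.0" before comparing
    ops.apply(:lessThanOrEqual, "99", 99)       # => false; numeric operators never coerce strings
    ops.apply(:matches, "hello world", "l+")    # => true; regex match against the user value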
- - operatorTests = [ - # numeric comparisons - [ :in, 99, 99, true ], - [ :in, 99.0001, 99.0001, true ], - [ :in, 99, 99.0001, false ], - [ :in, 99.0001, 99, false ], - [ :lessThan, 99, 99.0001, true ], - [ :lessThan, 99.0001, 99, false ], - [ :lessThan, 99, 99, false ], - [ :lessThanOrEqual, 99, 99.0001, true ], - [ :lessThanOrEqual, 99.0001, 99, false ], - [ :lessThanOrEqual, 99, 99, true ], - [ :greaterThan, 99.0001, 99, true ], - [ :greaterThan, 99, 99.0001, false ], - [ :greaterThan, 99, 99, false ], - [ :greaterThanOrEqual, 99.0001, 99, true ], - [ :greaterThanOrEqual, 99, 99.0001, false ], - [ :greaterThanOrEqual, 99, 99, true ], - - # string comparisons - [ :in, "x", "x", true ], - [ :in, "x", "xyz", false ], - [ :startsWith, "xyz", "x", true ], - [ :startsWith, "x", "xyz", false ], - [ :endsWith, "xyz", "z", true ], - [ :endsWith, "z", "xyz", false ], - [ :contains, "xyz", "y", true ], - [ :contains, "y", "xyz", false ], - - # mixed strings and numbers - [ :in, "99", 99, false ], - [ :in, 99, "99", false ], - #[ :contains, "99", 99, false ], # currently throws exception - would return false in Java SDK - #[ :startsWith, "99", 99, false ], # currently throws exception - would return false in Java SDK - #[ :endsWith, "99", 99, false ] # currently throws exception - would return false in Java SDK - [ :lessThanOrEqual, "99", 99, false ], - #[ :lessThanOrEqual, 99, "99", false ], # currently throws exception - would return false in Java SDK - [ :greaterThanOrEqual, "99", 99, false ], - #[ :greaterThanOrEqual, 99, "99", false ], # currently throws exception - would return false in Java SDK - - # regex - [ :matches, "hello world", "hello.*rld", true ], - [ :matches, "hello world", "hello.*orl", true ], - [ :matches, "hello world", "l+", true ], - [ :matches, "hello world", "(world|planet)", true ], - [ :matches, "hello world", "aloha", false ], - #[ :matches, "hello world", "***not a regex", false ] # currently throws exception - same as Java SDK - - # dates - [ :before, dateStr1, dateStr2, true ], - [ :before, dateMs1, dateMs2, true ], - [ :before, dateStr2, dateStr1, false ], - [ :before, dateMs2, dateMs1, false ], - [ :before, dateStr1, dateStr1, false ], - [ :before, dateMs1, dateMs1, false ], - [ :before, dateStr1, invalidDate, false ], - [ :after, dateStr1, dateStr2, false ], - [ :after, dateMs1, dateMs2, false ], - [ :after, dateStr2, dateStr1, true ], - [ :after, dateMs2, dateMs1, true ], - [ :after, dateStr1, dateStr1, false ], - [ :after, dateMs1, dateMs1, false ], - [ :after, dateStr1, invalidDate, false ], - - # semver - [ :semVerEqual, "2.0.1", "2.0.1", true ], - [ :semVerEqual, "2.0", "2.0.0", true ], - [ :semVerEqual, "2-rc1", "2.0.0-rc1", true ], - [ :semVerEqual, "2+build2", "2.0.0+build2", true ], - [ :semVerLessThan, "2.0.0", "2.0.1", true ], - [ :semVerLessThan, "2.0", "2.0.1", true ], - [ :semVerLessThan, "2.0.1", "2.0.0", false ], - [ :semVerLessThan, "2.0.1", "2.0", false ], - [ :semVerLessThan, "2.0.0-rc", "2.0.0-rc.beta", true ], - [ :semVerGreaterThan, "2.0.1", "2.0.0", true ], - [ :semVerGreaterThan, "2.0.1", "2.0", true ], - [ :semVerGreaterThan, "2.0.0", "2.0.1", false ], - [ :semVerGreaterThan, "2.0", "2.0.1", false ], - [ :semVerGreaterThan, "2.0.0-rc.1", "2.0.0-rc.0", true ], - [ :semVerLessThan, "2.0.1", "xbad%ver", false ], - [ :semVerGreaterThan, "2.0.1", "xbad%ver", false ] - ] - - operatorTests.each do |params| - op = params[0] - value1 = params[1] - value2 = params[2] - shouldBe = params[3] - it "should return #{shouldBe} for #{value1} #{op} 
#{value2}" do - user = { key: 'x', custom: { foo: value1 } } - clause = { attribute: 'foo', op: op, values: [value2] } - flag = boolean_flag_with_clauses([clause]) - expect(evaluate(flag, user, features, logger, factory).detail.value).to be shouldBe - end - end - end - - describe "bucket_user" do - it "gets expected bucket values for specific keys" do - user = { key: "userKeyA" } - bucket = bucket_user(user, "hashKey", "key", "saltyA") - expect(bucket).to be_within(0.0000001).of(0.42157587); - - user = { key: "userKeyB" } - bucket = bucket_user(user, "hashKey", "key", "saltyA") - expect(bucket).to be_within(0.0000001).of(0.6708485); - - user = { key: "userKeyC" } - bucket = bucket_user(user, "hashKey", "key", "saltyA") - expect(bucket).to be_within(0.0000001).of(0.10343106); - end - - it "can bucket by int value (equivalent to string)" do - user = { - key: "userkey", - custom: { - stringAttr: "33333", - intAttr: 33333 - } - } - stringResult = bucket_user(user, "hashKey", "stringAttr", "saltyA") - intResult = bucket_user(user, "hashKey", "intAttr", "saltyA") - - expect(intResult).to be_within(0.0000001).of(0.54771423) - expect(intResult).to eq(stringResult) - end - - it "cannot bucket by float value" do - user = { - key: "userkey", - custom: { - floatAttr: 33.5 - } - } - result = bucket_user(user, "hashKey", "floatAttr", "saltyA") - expect(result).to eq(0.0) - end - - - it "cannot bucket by bool value" do - user = { - key: "userkey", - custom: { - boolAttr: true - } - } - result = bucket_user(user, "hashKey", "boolAttr", "saltyA") - expect(result).to eq(0.0) - end - end - - def make_segment(key) - { - key: key, - included: [], - excluded: [], - salt: 'abcdef', - version: 1 - } - end - - def make_segment_match_clause(segment) - { - op: :segmentMatch, - values: [ segment[:key] ], - negate: false - } - end - - def make_user_matching_clause(user, attr) - { - attribute: attr.to_s, - op: :in, - values: [ user[attr.to_sym] ], - negate: false - } - end - - describe 'segment matching' do - def test_segment_match(segment) - features.upsert(LaunchDarkly::SEGMENTS, segment) - clause = make_segment_match_clause(segment) - flag = boolean_flag_with_clauses([clause]) - evaluate(flag, user, features, logger, factory).detail.value - end - - it 'explicitly includes user' do - segment = make_segment('segkey') - segment[:included] = [ user[:key] ] - expect(test_segment_match(segment)).to be true - end - - it 'explicitly excludes user' do - segment = make_segment('segkey') - segment[:excluded] = [ user[:key] ] - expect(test_segment_match(segment)).to be false - end - - it 'both includes and excludes user; include takes priority' do - segment = make_segment('segkey') - segment[:included] = [ user[:key] ] - segment[:excluded] = [ user[:key] ] - expect(test_segment_match(segment)).to be true - end - - it 'matches user by rule when weight is absent' do - segClause = make_user_matching_clause(user, :email) - segRule = { - clauses: [ segClause ] - } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(segment)).to be true - end - - it 'matches user by rule when weight is nil' do - segClause = make_user_matching_clause(user, :email) - segRule = { - clauses: [ segClause ], - weight: nil - } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(segment)).to be true - end - - it 'matches user with full rollout' do - segClause = make_user_matching_clause(user, :email) - segRule = { - clauses: [ segClause ], - weight: 100000 - } - segment = 
make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(segment)).to be true - end - - it "doesn't match user with zero rollout" do - segClause = make_user_matching_clause(user, :email) - segRule = { - clauses: [ segClause ], - weight: 0 - } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(segment)).to be false - end - - it "matches user with multiple clauses" do - segClause1 = make_user_matching_clause(user, :email) - segClause2 = make_user_matching_clause(user, :name) - segRule = { - clauses: [ segClause1, segClause2 ] - } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(segment)).to be true - end - - it "doesn't match user with multiple clauses if a clause doesn't match" do - segClause1 = make_user_matching_clause(user, :email) - segClause2 = make_user_matching_clause(user, :name) - segClause2[:values] = [ 'wrong' ] - segRule = { - clauses: [ segClause1, segClause2 ] - } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(segment)).to be false - end - end -end diff --git a/spec/impl/evaluator_bucketing_spec.rb b/spec/impl/evaluator_bucketing_spec.rb new file mode 100644 index 00000000..a840a7c7 --- /dev/null +++ b/spec/impl/evaluator_bucketing_spec.rb @@ -0,0 +1,59 @@ +require "spec_helper" + +describe LaunchDarkly::Impl::EvaluatorBucketing do + subject { LaunchDarkly::Impl::EvaluatorBucketing } + + describe "bucket_user" do + it "gets expected bucket values for specific keys" do + user = { key: "userKeyA" } + bucket = subject.bucket_user(user, "hashKey", "key", "saltyA") + expect(bucket).to be_within(0.0000001).of(0.42157587); + + user = { key: "userKeyB" } + bucket = subject.bucket_user(user, "hashKey", "key", "saltyA") + expect(bucket).to be_within(0.0000001).of(0.6708485); + + user = { key: "userKeyC" } + bucket = subject.bucket_user(user, "hashKey", "key", "saltyA") + expect(bucket).to be_within(0.0000001).of(0.10343106); + end + + it "can bucket by int value (equivalent to string)" do + user = { + key: "userkey", + custom: { + stringAttr: "33333", + intAttr: 33333 + } + } + stringResult = subject.bucket_user(user, "hashKey", "stringAttr", "saltyA") + intResult = subject.bucket_user(user, "hashKey", "intAttr", "saltyA") + + expect(intResult).to be_within(0.0000001).of(0.54771423) + expect(intResult).to eq(stringResult) + end + + it "cannot bucket by float value" do + user = { + key: "userkey", + custom: { + floatAttr: 33.5 + } + } + result = subject.bucket_user(user, "hashKey", "floatAttr", "saltyA") + expect(result).to eq(0.0) + end + + + it "cannot bucket by bool value" do + user = { + key: "userkey", + custom: { + boolAttr: true + } + } + result = subject.bucket_user(user, "hashKey", "boolAttr", "saltyA") + expect(result).to eq(0.0) + end + end +end diff --git a/spec/impl/evaluator_operators_spec.rb b/spec/impl/evaluator_operators_spec.rb new file mode 100644 index 00000000..d24087f2 --- /dev/null +++ b/spec/impl/evaluator_operators_spec.rb @@ -0,0 +1,106 @@ +require "spec_helper" + +describe LaunchDarkly::Impl::EvaluatorOperators do + subject { LaunchDarkly::Impl::EvaluatorOperators } + + describe "operators" do + dateStr1 = "2017-12-06T00:00:00.000-07:00" + dateStr2 = "2017-12-06T00:01:01.000-07:00" + dateMs1 = 10000000 + dateMs2 = 10000001 + invalidDate = "hey what's this?" 
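    # NOTE (editorial sketch, not part of this patch): the expected values in
    # spec/impl/evaluator_bucketing_spec.rb above follow directly from the hashing scheme in
    # EvaluatorBucketing.bucket_user: SHA1 of "flagKey.salt.bucketByValue", first 15 hex
    # digits, scaled into [0, 1). Reproduced by hand for the first assertion:
    require "digest/sha1"
    hash_key = "hashKey.saltyA.userKeyA"   # "%s.%s.%s" % [key, salt, id_hash]
    bucket = Digest::SHA1.hexdigest(hash_key)[0..14].to_i(16) / Float(0xFFFFFFFFFFFFFFF)
    # bucket should come out near 0.42157587, matching the spec's first expectation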
+ + operatorTests = [ + # numeric comparisons + [ :in, 99, 99, true ], + [ :in, 99.0001, 99.0001, true ], + [ :in, 99, 99.0001, false ], + [ :in, 99.0001, 99, false ], + [ :lessThan, 99, 99.0001, true ], + [ :lessThan, 99.0001, 99, false ], + [ :lessThan, 99, 99, false ], + [ :lessThanOrEqual, 99, 99.0001, true ], + [ :lessThanOrEqual, 99.0001, 99, false ], + [ :lessThanOrEqual, 99, 99, true ], + [ :greaterThan, 99.0001, 99, true ], + [ :greaterThan, 99, 99.0001, false ], + [ :greaterThan, 99, 99, false ], + [ :greaterThanOrEqual, 99.0001, 99, true ], + [ :greaterThanOrEqual, 99, 99.0001, false ], + [ :greaterThanOrEqual, 99, 99, true ], + + # string comparisons + [ :in, "x", "x", true ], + [ :in, "x", "xyz", false ], + [ :startsWith, "xyz", "x", true ], + [ :startsWith, "x", "xyz", false ], + [ :endsWith, "xyz", "z", true ], + [ :endsWith, "z", "xyz", false ], + [ :contains, "xyz", "y", true ], + [ :contains, "y", "xyz", false ], + + # mixed strings and numbers + [ :in, "99", 99, false ], + [ :in, 99, "99", false ], + #[ :contains, "99", 99, false ], # currently throws exception - would return false in Java SDK + #[ :startsWith, "99", 99, false ], # currently throws exception - would return false in Java SDK + #[ :endsWith, "99", 99, false ] # currently throws exception - would return false in Java SDK + [ :lessThanOrEqual, "99", 99, false ], + #[ :lessThanOrEqual, 99, "99", false ], # currently throws exception - would return false in Java SDK + [ :greaterThanOrEqual, "99", 99, false ], + #[ :greaterThanOrEqual, 99, "99", false ], # currently throws exception - would return false in Java SDK + + # regex + [ :matches, "hello world", "hello.*rld", true ], + [ :matches, "hello world", "hello.*orl", true ], + [ :matches, "hello world", "l+", true ], + [ :matches, "hello world", "(world|planet)", true ], + [ :matches, "hello world", "aloha", false ], + #[ :matches, "hello world", "***not a regex", false ] # currently throws exception - same as Java SDK + + # dates + [ :before, dateStr1, dateStr2, true ], + [ :before, dateMs1, dateMs2, true ], + [ :before, dateStr2, dateStr1, false ], + [ :before, dateMs2, dateMs1, false ], + [ :before, dateStr1, dateStr1, false ], + [ :before, dateMs1, dateMs1, false ], + [ :before, dateStr1, invalidDate, false ], + [ :after, dateStr1, dateStr2, false ], + [ :after, dateMs1, dateMs2, false ], + [ :after, dateStr2, dateStr1, true ], + [ :after, dateMs2, dateMs1, true ], + [ :after, dateStr1, dateStr1, false ], + [ :after, dateMs1, dateMs1, false ], + [ :after, dateStr1, invalidDate, false ], + + # semver + [ :semVerEqual, "2.0.1", "2.0.1", true ], + [ :semVerEqual, "2.0", "2.0.0", true ], + [ :semVerEqual, "2-rc1", "2.0.0-rc1", true ], + [ :semVerEqual, "2+build2", "2.0.0+build2", true ], + [ :semVerLessThan, "2.0.0", "2.0.1", true ], + [ :semVerLessThan, "2.0", "2.0.1", true ], + [ :semVerLessThan, "2.0.1", "2.0.0", false ], + [ :semVerLessThan, "2.0.1", "2.0", false ], + [ :semVerLessThan, "2.0.0-rc", "2.0.0-rc.beta", true ], + [ :semVerGreaterThan, "2.0.1", "2.0.0", true ], + [ :semVerGreaterThan, "2.0.1", "2.0", true ], + [ :semVerGreaterThan, "2.0.0", "2.0.1", false ], + [ :semVerGreaterThan, "2.0", "2.0.1", false ], + [ :semVerGreaterThan, "2.0.0-rc.1", "2.0.0-rc.0", true ], + [ :semVerLessThan, "2.0.1", "xbad%ver", false ], + [ :semVerGreaterThan, "2.0.1", "xbad%ver", false ] + ] + + operatorTests.each do |params| + op = params[0] + value1 = params[1] + value2 = params[2] + shouldBe = params[3] + it "should return #{shouldBe} for #{value1} #{op} 
#{value2}" do + expect(subject::apply(op, value1, value2)).to be shouldBe + end + end + end +end diff --git a/spec/impl/evaluator_spec.rb b/spec/impl/evaluator_spec.rb new file mode 100644 index 00000000..04f99a22 --- /dev/null +++ b/spec/impl/evaluator_spec.rb @@ -0,0 +1,600 @@ +require "spec_helper" + +module LaunchDarkly + module Impl + describe "Evaluator" do + subject { Evaluator } + + let(:factory) { EventFactory.new(false) } + + let(:user) { + { + key: "userkey", + email: "test@example.com", + name: "Bob" + } + } + + let(:logger) { ::Logger.new($stdout, level: ::Logger::FATAL) } + + def get_nothing + lambda { |key| raise "should not have requested #{key}" } + end + + def get_things(map) + lambda { |key| + raise "should not have requested #{key}" if !map.has_key?(key) + map[key] + } + end + + def basic_evaluator + subject.new(get_nothing, get_nothing, logger) + end + + def boolean_flag_with_rules(rules) + { key: 'feature', on: true, rules: rules, fallthrough: { variation: 0 }, variations: [ false, true ] } + end + + def boolean_flag_with_clauses(clauses) + boolean_flag_with_rules([{ id: 'ruleid', clauses: clauses, variation: 1 }]) + end + + describe "evaluate" do + it "returns off variation if flag is off" do + flag = { + key: 'feature', + on: false, + offVariation: 1, + fallthrough: { variation: 0 }, + variations: ['a', 'b', 'c'] + } + user = { key: 'x' } + detail = EvaluationDetail.new('b', 1, { kind: 'OFF' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "returns nil if flag is off and off variation is unspecified" do + flag = { + key: 'feature', + on: false, + fallthrough: { variation: 0 }, + variations: ['a', 'b', 'c'] + } + user = { key: 'x' } + detail = EvaluationDetail.new(nil, nil, { kind: 'OFF' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "returns an error if off variation is too high" do + flag = { + key: 'feature', + on: false, + offVariation: 999, + fallthrough: { variation: 0 }, + variations: ['a', 'b', 'c'] + } + user = { key: 'x' } + detail = EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "returns an error if off variation is negative" do + flag = { + key: 'feature', + on: false, + offVariation: -1, + fallthrough: { variation: 0 }, + variations: ['a', 'b', 'c'] + } + user = { key: 'x' } + detail = EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "returns off variation if prerequisite is not found" do + flag = { + key: 'feature0', + on: true, + prerequisites: [{key: 'badfeature', variation: 1}], + fallthrough: { variation: 0 }, + offVariation: 1, + variations: ['a', 'b', 'c'] + } + user = { key: 'x' } + detail = EvaluationDetail.new('b', 1, + { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'badfeature' }) + e = subject.new(get_things( 'badfeature' => nil ), get_nothing, logger) + result = e.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "returns off variation and event if prerequisite of a prerequisite is not found" do + flag = { + key: 'feature0', + on: true, + 
prerequisites: [{key: 'feature1', variation: 1}], + fallthrough: { variation: 0 }, + offVariation: 1, + variations: ['a', 'b', 'c'], + version: 1 + } + flag1 = { + key: 'feature1', + on: true, + prerequisites: [{key: 'feature2', variation: 1}], # feature2 doesn't exist + fallthrough: { variation: 0 }, + variations: ['d', 'e'], + version: 2 + } + user = { key: 'x' } + detail = EvaluationDetail.new('b', 1, + { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) + events_should_be = [{ + kind: 'feature', key: 'feature1', user: user, value: nil, default: nil, variation: nil, version: 2, prereqOf: 'feature0' + }] + get_flag = get_things('feature1' => flag1, 'feature2' => nil) + e = subject.new(get_flag, get_nothing, logger) + result = e.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(events_should_be) + end + + it "returns off variation and event if prerequisite is off" do + flag = { + key: 'feature0', + on: true, + prerequisites: [{key: 'feature1', variation: 1}], + fallthrough: { variation: 0 }, + offVariation: 1, + variations: ['a', 'b', 'c'], + version: 1 + } + flag1 = { + key: 'feature1', + on: false, + # note that even though it returns the desired variation, it is still off and therefore not a match + offVariation: 1, + fallthrough: { variation: 0 }, + variations: ['d', 'e'], + version: 2 + } + user = { key: 'x' } + detail = EvaluationDetail.new('b', 1, + { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) + events_should_be = [{ + kind: 'feature', key: 'feature1', user: user, variation: 1, value: 'e', default: nil, version: 2, prereqOf: 'feature0' + }] + get_flag = get_things({ 'feature1' => flag1 }) + e = subject.new(get_flag, get_nothing, logger) + result = e.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(events_should_be) + end + + it "returns off variation and event if prerequisite is not met" do + flag = { + key: 'feature0', + on: true, + prerequisites: [{key: 'feature1', variation: 1}], + fallthrough: { variation: 0 }, + offVariation: 1, + variations: ['a', 'b', 'c'], + version: 1 + } + flag1 = { + key: 'feature1', + on: true, + fallthrough: { variation: 0 }, + variations: ['d', 'e'], + version: 2 + } + user = { key: 'x' } + detail = EvaluationDetail.new('b', 1, + { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) + events_should_be = [{ + kind: 'feature', key: 'feature1', user: user, variation: 0, value: 'd', default: nil, version: 2, prereqOf: 'feature0' + }] + get_flag = get_things({ 'feature1' => flag1 }) + e = subject.new(get_flag, get_nothing, logger) + result = e.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(events_should_be) + end + + it "returns fallthrough variation and event if prerequisite is met and there are no rules" do + flag = { + key: 'feature0', + on: true, + prerequisites: [{key: 'feature1', variation: 1}], + fallthrough: { variation: 0 }, + offVariation: 1, + variations: ['a', 'b', 'c'], + version: 1 + } + flag1 = { + key: 'feature1', + on: true, + fallthrough: { variation: 1 }, + variations: ['d', 'e'], + version: 2 + } + user = { key: 'x' } + detail = EvaluationDetail.new('a', 0, { kind: 'FALLTHROUGH' }) + events_should_be = [{ + kind: 'feature', key: 'feature1', user: user, variation: 1, value: 'e', default: nil, version: 2, prereqOf: 'feature0' + }] + get_flag = get_things({ 'feature1' => flag1 }) + e = subject.new(get_flag, get_nothing, logger) + result = e.evaluate(flag, 
user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(events_should_be) + end + + it "returns an error if fallthrough variation is too high" do + flag = { + key: 'feature', + on: true, + fallthrough: { variation: 999 }, + offVariation: 1, + variations: ['a', 'b', 'c'] + } + user = { key: 'userkey' } + detail = EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "returns an error if fallthrough variation is negative" do + flag = { + key: 'feature', + on: true, + fallthrough: { variation: -1 }, + offVariation: 1, + variations: ['a', 'b', 'c'] + } + user = { key: 'userkey' } + detail = EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "returns an error if fallthrough has no variation or rollout" do + flag = { + key: 'feature', + on: true, + fallthrough: { }, + offVariation: 1, + variations: ['a', 'b', 'c'] + } + user = { key: 'userkey' } + detail = EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "returns an error if fallthrough has a rollout with no variations" do + flag = { + key: 'feature', + on: true, + fallthrough: { rollout: { variations: [] } }, + offVariation: 1, + variations: ['a', 'b', 'c'] + } + user = { key: 'userkey' } + detail = EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "matches user from targets" do + flag = { + key: 'feature', + on: true, + targets: [ + { values: [ 'whoever', 'userkey' ], variation: 2 } + ], + fallthrough: { variation: 0 }, + offVariation: 1, + variations: ['a', 'b', 'c'] + } + user = { key: 'userkey' } + detail = EvaluationDetail.new('c', 2, { kind: 'TARGET_MATCH' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "matches user from rules" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 1 } + flag = boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = EvaluationDetail.new(true, 1, + { kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'ruleid' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "returns an error if rule variation is too high" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 999 } + flag = boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "returns an error if rule variation is negative" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: -1 } + flag = boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = 
EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "returns an error if rule has neither variation nor rollout" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }] } + flag = boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "returns an error if rule has a rollout with no variations" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], + rollout: { variations: [] } } + flag = boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "coerces user key to a string for evaluation" do + clause = { attribute: 'key', op: 'in', values: ['999'] } + flag = boolean_flag_with_clauses([clause]) + user = { key: 999 } + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail.value).to eq(true) + end + + it "coerces secondary key to a string for evaluation" do + # We can't really verify that the rollout calculation works correctly, but we can at least + # make sure it doesn't error out if there's a non-string secondary value (ch35189) + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], + rollout: { salt: '', variations: [ { weight: 100000, variation: 1 } ] } } + flag = boolean_flag_with_rules([rule]) + user = { key: "userkey", secondary: 999 } + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail.reason).to eq({ kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'ruleid'}) + end + end + + describe "clause" do + it "can match built-in attribute" do + user = { key: 'x', name: 'Bob' } + clause = { attribute: 'name', op: 'in', values: ['Bob'] } + flag = boolean_flag_with_clauses([clause]) + expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be true + end + + it "can match custom attribute" do + user = { key: 'x', name: 'Bob', custom: { legs: 4 } } + clause = { attribute: 'legs', op: 'in', values: [4] } + flag = boolean_flag_with_clauses([clause]) + expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be true + end + + it "returns false for missing attribute" do + user = { key: 'x', name: 'Bob' } + clause = { attribute: 'legs', op: 'in', values: [4] } + flag = boolean_flag_with_clauses([clause]) + expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be false + end + + it "returns false for unknown operator" do + user = { key: 'x', name: 'Bob' } + clause = { attribute: 'name', op: 'unknown', values: [4] } + flag = boolean_flag_with_clauses([clause]) + expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be false + end + + it "does not stop evaluating rules after clause with unknown operator" do + user = { key: 'x', name: 'Bob' } + clause0 = { attribute: 'name', op: 'unknown', values: [4] } + rule0 = { clauses: [ clause0 ], variation: 1 } + clause1 = { attribute: 'name', op: 'in', values: ['Bob'] } + rule1 = { clauses: [ clause1 ], variation: 1 } + 
flag = boolean_flag_with_rules([rule0, rule1]) + expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be true + end + + it "can be negated" do + user = { key: 'x', name: 'Bob' } + clause = { attribute: 'name', op: 'in', values: ['Bob'], negate: true } + flag = boolean_flag_with_clauses([clause]) + expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be false + end + + it "retrieves segment from segment store for segmentMatch operator" do + segment = { + key: 'segkey', + included: [ 'userkey' ], + version: 1, + deleted: false + } + get_segment = get_things({ 'segkey' => segment }) + e = subject.new(get_nothing, get_segment, logger) + user = { key: 'userkey' } + clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } + flag = boolean_flag_with_clauses([clause]) + expect(e.evaluate(flag, user, factory).detail.value).to be true + end + + it "falls through with no errors if referenced segment is not found" do + e = subject.new(get_nothing, get_things({ 'segkey' => nil }), logger) + user = { key: 'userkey' } + clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } + flag = boolean_flag_with_clauses([clause]) + expect(e.evaluate(flag, user, factory).detail.value).to be false + end + + it "can be negated" do + user = { key: 'x', name: 'Bob' } + clause = { attribute: 'name', op: 'in', values: ['Bob'] } + flag = boolean_flag_with_clauses([clause]) + expect { + clause[:negate] = true + }.to change {basic_evaluator.evaluate(flag, user, factory).detail.value}.from(true).to(false) + end + end + + def make_segment(key) + { + key: key, + included: [], + excluded: [], + salt: 'abcdef', + version: 1 + } + end + + def make_segment_match_clause(segment) + { + op: :segmentMatch, + values: [ segment[:key] ], + negate: false + } + end + + def make_user_matching_clause(user, attr) + { + attribute: attr.to_s, + op: :in, + values: [ user[attr.to_sym] ], + negate: false + } + end + + describe 'segment matching' do + def test_segment_match(segment) + clause = make_segment_match_clause(segment) + flag = boolean_flag_with_clauses([clause]) + e = subject.new(get_nothing, get_things({ segment[:key] => segment }), logger) + e.evaluate(flag, user, factory).detail.value + end + + it 'explicitly includes user' do + segment = make_segment('segkey') + segment[:included] = [ user[:key] ] + expect(test_segment_match(segment)).to be true + end + + it 'explicitly excludes user' do + segment = make_segment('segkey') + segment[:excluded] = [ user[:key] ] + expect(test_segment_match(segment)).to be false + end + + it 'both includes and excludes user; include takes priority' do + segment = make_segment('segkey') + segment[:included] = [ user[:key] ] + segment[:excluded] = [ user[:key] ] + expect(test_segment_match(segment)).to be true + end + + it 'matches user by rule when weight is absent' do + segClause = make_user_matching_clause(user, :email) + segRule = { + clauses: [ segClause ] + } + segment = make_segment('segkey') + segment[:rules] = [ segRule ] + expect(test_segment_match(segment)).to be true + end + + it 'matches user by rule when weight is nil' do + segClause = make_user_matching_clause(user, :email) + segRule = { + clauses: [ segClause ], + weight: nil + } + segment = make_segment('segkey') + segment[:rules] = [ segRule ] + expect(test_segment_match(segment)).to be true + end + + it 'matches user with full rollout' do + segClause = make_user_matching_clause(user, :email) + segRule = { + clauses: [ segClause ], + weight: 100000 + } + segment = make_segment('segkey') 
+ segment[:rules] = [ segRule ] + expect(test_segment_match(segment)).to be true + end + + it "doesn't match user with zero rollout" do + segClause = make_user_matching_clause(user, :email) + segRule = { + clauses: [ segClause ], + weight: 0 + } + segment = make_segment('segkey') + segment[:rules] = [ segRule ] + expect(test_segment_match(segment)).to be false + end + + it "matches user with multiple clauses" do + segClause1 = make_user_matching_clause(user, :email) + segClause2 = make_user_matching_clause(user, :name) + segRule = { + clauses: [ segClause1, segClause2 ] + } + segment = make_segment('segkey') + segment[:rules] = [ segRule ] + expect(test_segment_match(segment)).to be true + end + + it "doesn't match user with multiple clauses if a clause doesn't match" do + segClause1 = make_user_matching_clause(user, :email) + segClause2 = make_user_matching_clause(user, :name) + segClause2[:values] = [ 'wrong' ] + segRule = { + clauses: [ segClause1, segClause2 ] + } + segment = make_segment('segkey') + segment[:rules] = [ segRule ] + expect(test_segment_match(segment)).to be false + end + end + end + end +end From 45ea4379045e99c122fb1a05960f11db5a071178 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 2 Jan 2020 12:40:10 -0800 Subject: [PATCH 136/182] comments --- lib/ldclient-rb/impl/evaluator.rb | 27 ++++++++++++++++++--- lib/ldclient-rb/impl/evaluator_bucketing.rb | 8 ++++++ lib/ldclient-rb/impl/evaluator_operators.rb | 25 +++++++++++++++++-- 3 files changed, 55 insertions(+), 5 deletions(-) diff --git a/lib/ldclient-rb/impl/evaluator.rb b/lib/ldclient-rb/impl/evaluator.rb index e84e369b..abcde944 100644 --- a/lib/ldclient-rb/impl/evaluator.rb +++ b/lib/ldclient-rb/impl/evaluator.rb @@ -4,22 +4,43 @@ module LaunchDarkly module Impl + # Encapsulates the feature flag evaluation logic. The Evaluator has no knowledge of the rest of the SDK environment; + # if it needs to retrieve flags or segments that are referenced by a flag, it does so through a simple function that + # is provided in the constructor. It also produces feature requests as appropriate for any referenced prerequisite + # flags, but does not send them. class Evaluator + # A single Evaluator is instantiated for each client instance. + # + # @param get_flag [Function] called if the Evaluator needs to query a different flag from the one that it is + # currently evaluating (i.e. a prerequisite flag); takes a single parameter, the flag key, and returns the + # flag data - or nil if the flag is unknown or deleted + # @param get_segment [Function] similar to `get_flag`, but is used to query a user segment. + # @param logger [Logger] the client's logger def initialize(get_flag, get_segment, logger) @get_flag = get_flag @get_segment = get_segment @logger = logger end - # Used internally to hold an evaluation result and the events that were generated from prerequisites. + # Used internally to hold an evaluation result and the events that were generated from prerequisites. The + # `detail` property is an EvaluationDetail. The `events` property can be either an array of feature request + # events or nil. EvalResult = Struct.new(:detail, :events) + # Helper function used internally to construct an EvaluationDetail for an error result. def self.error_result(errorKind, value = nil) EvaluationDetail.new(value, nil, { kind: 'ERROR', errorKind: errorKind }) end - # Evaluates a feature flag and returns an EvalResult. The result.value will be nil if the flag returns - # the default value. 
Error conditions produce a result with an error reason, not an exception. + # The client's entry point for evaluating a flag. The returned `EvalResult` contains the evaluation result and + # any events that were generated for prerequisite flags; its `value` will be `nil` if the flag returns the + # default value. Error conditions produce a result with a nil value and an error reason, not an exception. + # + # @param flag [Object] the flag + # @param user [Object] the user properties + # @param event_factory [EventFactory] called to construct a feature request event when a prerequisite flag is + # evaluated; the caller is responsible for constructing the feature event for the top-level evaluation + # @return [EvalResult] the evaluation result def evaluate(flag, user, event_factory) if user.nil? || user[:key].nil? return EvalResult.new(Evaluator.error_result('USER_NOT_SPECIFIED'), []) diff --git a/lib/ldclient-rb/impl/evaluator_bucketing.rb b/lib/ldclient-rb/impl/evaluator_bucketing.rb index 273ec1e6..4c00a009 100644 --- a/lib/ldclient-rb/impl/evaluator_bucketing.rb +++ b/lib/ldclient-rb/impl/evaluator_bucketing.rb @@ -1,7 +1,15 @@ module LaunchDarkly module Impl + # Encapsulates the logic for percentage rollouts. module EvaluatorBucketing + # Returns a user's bucket value as a floating-point value in `[0, 1)`. + # + # @param user [Object] the user properties + # @param key [String] the feature flag key (or segment key, if this is for a segment rule) + # @param bucket_by [String|Symbol] the name of the user attribute to be used for bucketing + # @param salt [String] the feature flag's or segment's salt value + # @return [Number] the bucket value, from 0 inclusive to 1 exclusive def self.bucket_user(user, key, bucket_by, salt) return nil unless user[:key] diff --git a/lib/ldclient-rb/impl/evaluator_operators.rb b/lib/ldclient-rb/impl/evaluator_operators.rb index 2bc8643b..18a22f35 100644 --- a/lib/ldclient-rb/impl/evaluator_operators.rb +++ b/lib/ldclient-rb/impl/evaluator_operators.rb @@ -3,7 +3,17 @@ module LaunchDarkly module Impl + # Defines the behavior of all operators that can be used in feature flag rules and segment rules. module EvaluatorOperators + # Applies an operator to produce a boolean result. + # + # @param op [Symbol] one of the supported LaunchDarkly operators, as a symbol + # @param user_value the value of the user attribute that is referenced in the current clause (left-hand + # side of the expression) + # @param clause_value the constant value that `user_value` is being compared to (right-hand side of the + # expression) + # @return [Boolean] true if the expression should be considered a match; false if it is not a match, or + # if the values cannot be compared because they are of the wrong types, or if the operator is unknown def self.apply(op, user_value, clause_value) case op when :in @@ -35,12 +45,23 @@ def self.apply(op, user_value, clause_value) when :semVerGreaterThan semver_op(user_value, clause_value, lambda { |a, b| a > b }) when :segmentMatch - false # we should never reach this - instead we special-case this operator in clause_match_user + # We should never reach this; it can't be evaluated based on just two parameters, because it requires + # looking up the segment from the data store. Instead, we special-case this operator in clause_match_user. + false else false end end + # Retrieves the value of a user attribute by name. 
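To make the Evaluator API documented above concrete, here is a minimal usage sketch in Ruby. It mirrors the helpers used in the specs further down in this series; the flag and user hashes are illustrative and are not part of the patch:

    require "logger"

    # get_flag/get_segment are plain lambdas backed by a Hash, as in the specs' get_things helper.
    flags = {}
    get_flag = lambda { |key| flags[key] }
    get_segment = lambda { |key| nil }

    e = LaunchDarkly::Impl::Evaluator.new(get_flag, get_segment, Logger.new($stdout))
    flag = { key: "feature", on: true, fallthrough: { variation: 0 }, variations: ["a", "b"], version: 1 }
    result = e.evaluate(flag, { key: "userkey" }, LaunchDarkly::Impl::EventFactory.new(false))
    result.detail.value   # => "a" (fallthrough variation; this flag has no prerequisites, targets, or rules)
    result.events         # => nil (no prerequisite flags were evaluated)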
+ # + # Built-in attributes correspond to top-level properties in the user object, and are always coerced to + # strings except for `anonymous`. Custom attributes correspond to properties within the `custom` property, + # if any, and can be of any type. + # + # @param user [Object] the user properties + # @param attribute [String|Symbol] the attribute to get, for instance `:key` or `:name` or `:some_custom_attr` + # @return the attribute value, or nil if the attribute is unknown def self.user_value(user, attribute) attribute = attribute.to_sym if BUILTINS.include? attribute @@ -56,7 +77,7 @@ def self.user_value(user, attribute) private - BUILTINS = [:key, :ip, :country, :email, :firstName, :lastName, :avatar, :name, :anonymous] + BUILTINS = Set[:key, :ip, :country, :email, :firstName, :lastName, :avatar, :name, :anonymous] NUMERIC_VERSION_COMPONENTS_REGEX = Regexp.new("^[0-9.]*") private_constant :BUILTINS From 575352c9cb308c2075bcd83b43cfcb0a306fd4e5 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 2 Jan 2020 12:47:27 -0800 Subject: [PATCH 137/182] fix type coercion behavior --- lib/ldclient-rb/impl/evaluator_operators.rb | 17 +++++++--- spec/impl/evaluator_operators_spec.rb | 35 +++++++++++++++++++++ 2 files changed, 47 insertions(+), 5 deletions(-) diff --git a/lib/ldclient-rb/impl/evaluator_operators.rb b/lib/ldclient-rb/impl/evaluator_operators.rb index 18a22f35..98ac2e40 100644 --- a/lib/ldclient-rb/impl/evaluator_operators.rb +++ b/lib/ldclient-rb/impl/evaluator_operators.rb @@ -1,5 +1,6 @@ require "date" require "semantic" +require "set" module LaunchDarkly module Impl @@ -55,9 +56,13 @@ def self.apply(op, user_value, clause_value) # Retrieves the value of a user attribute by name. # - # Built-in attributes correspond to top-level properties in the user object, and are always coerced to - # strings except for `anonymous`. Custom attributes correspond to properties within the `custom` property, - # if any, and can be of any type. + # Built-in attributes correspond to top-level properties in the user object. They are treated as strings and + # non-string values are coerced to strings, except for `anonymous` which is treated as a boolean if present + # (using Ruby's "truthiness" standard). The coercion behavior is not guaranteed to be consistent with other + # SDKs; the built-in attributes should not be set to values of the wrong type (in the strongly-typed SDKs, + # they can't be, and in a future version of the Ruby SDK we may make it impossible to do so). + # + # Custom attributes correspond to properties within the `custom` property, if any, and can be of any type. # # @param user [Object] the user properties # @param attribute [String|Symbol] the attribute to get, for instance `:key` or `:name` or `:some_custom_attr` @@ -66,8 +71,8 @@ def self.user_value(user, attribute) attribute = attribute.to_sym if BUILTINS.include? attribute value = user[attribute] - return value.to_s if !value.nil? && !(value.is_a? String) - value + return nil if value.nil? + (attribute == :anonymous) ? !!value : value.to_s elsif !user[:custom].nil? 
user[:custom][attribute] else @@ -78,9 +83,11 @@ def self.user_value(user, attribute) private BUILTINS = Set[:key, :ip, :country, :email, :firstName, :lastName, :avatar, :name, :anonymous] + NON_STRING_BUILTINS = Set[:anonymous] NUMERIC_VERSION_COMPONENTS_REGEX = Regexp.new("^[0-9.]*") private_constant :BUILTINS + private_constant :NON_STRING_BUILTINS private_constant :NUMERIC_VERSION_COMPONENTS_REGEX def self.string_op(user_value, clause_value, fn) diff --git a/spec/impl/evaluator_operators_spec.rb b/spec/impl/evaluator_operators_spec.rb index d24087f2..92c68483 100644 --- a/spec/impl/evaluator_operators_spec.rb +++ b/spec/impl/evaluator_operators_spec.rb @@ -103,4 +103,39 @@ end end end + + describe "user_value" do + [:key, :ip, :country, :email, :firstName, :lastName, :avatar, :name, :anonymous, :some_custom_attr].each do |attr| + it "returns nil if property #{attr} is not defined" do + expect(subject::user_value({}, attr)).to be nil + end + end + + [:key, :ip, :country, :email, :firstName, :lastName, :avatar, :name].each do |attr| + it "gets string value of string property #{attr}" do + expect(subject::user_value({ attr => 'x' }, attr)).to eq 'x' + end + + it "coerces non-string value of property #{attr} to string" do + expect(subject::user_value({ attr => 3 }, attr)).to eq '3' + end + end + + it "gets boolean value of property anonymous" do + expect(subject::user_value({ anonymous: true }, :anonymous)).to be true + expect(subject::user_value({ anonymous: false }, :anonymous)).to be false + end + + it "coerces non-boolean value of property anonymous to boolean" do + expect(subject::user_value({ anonymous: 3 }, :anonymous)).to be true + end + + it "gets string value of custom property" do + expect(subject::user_value({ custom: { some_custom_attr: 'x' } }, :some_custom_attr)).to eq 'x' + end + + it "gets non-string value of custom property" do + expect(subject::user_value({ custom: { some_custom_attr: 3 } }, :some_custom_attr)).to eq 3 + end + end end From 62548d1d62257e000885fde8c5afd30da625a3a7 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 2 Jan 2020 12:54:06 -0800 Subject: [PATCH 138/182] make type coercion behavior consistent with earlier versions for now --- lib/ldclient-rb/impl/evaluator_operators.rb | 12 +++++------- spec/impl/evaluator_operators_spec.rb | 4 ++-- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/lib/ldclient-rb/impl/evaluator_operators.rb b/lib/ldclient-rb/impl/evaluator_operators.rb index 98ac2e40..56621790 100644 --- a/lib/ldclient-rb/impl/evaluator_operators.rb +++ b/lib/ldclient-rb/impl/evaluator_operators.rb @@ -57,10 +57,10 @@ def self.apply(op, user_value, clause_value) # Retrieves the value of a user attribute by name. # # Built-in attributes correspond to top-level properties in the user object. They are treated as strings and - # non-string values are coerced to strings, except for `anonymous` which is treated as a boolean if present - # (using Ruby's "truthiness" standard). The coercion behavior is not guaranteed to be consistent with other - # SDKs; the built-in attributes should not be set to values of the wrong type (in the strongly-typed SDKs, - # they can't be, and in a future version of the Ruby SDK we may make it impossible to do so). + # non-string values are coerced to strings, except for `anonymous` which is meant to be a boolean if present + # and is not currently coerced. 
This behavior is consistent with earlier versions of the Ruby SDK, but is not + # guaranteed to be consistent with other SDKs, since the evaluator specification is based on the strongly-typed + # SDKs where it is not possible for an attribute to have the wrong type. # # Custom attributes correspond to properties within the `custom` property, if any, and can be of any type. # @@ -72,7 +72,7 @@ def self.user_value(user, attribute) if BUILTINS.include? attribute value = user[attribute] return nil if value.nil? - (attribute == :anonymous) ? !!value : value.to_s + (attribute == :anonymous) ? value : value.to_s elsif !user[:custom].nil? user[:custom][attribute] else @@ -83,11 +83,9 @@ def self.user_value(user, attribute) private BUILTINS = Set[:key, :ip, :country, :email, :firstName, :lastName, :avatar, :name, :anonymous] - NON_STRING_BUILTINS = Set[:anonymous] NUMERIC_VERSION_COMPONENTS_REGEX = Regexp.new("^[0-9.]*") private_constant :BUILTINS - private_constant :NON_STRING_BUILTINS private_constant :NUMERIC_VERSION_COMPONENTS_REGEX def self.string_op(user_value, clause_value, fn) diff --git a/spec/impl/evaluator_operators_spec.rb b/spec/impl/evaluator_operators_spec.rb index 92c68483..7fdb05ca 100644 --- a/spec/impl/evaluator_operators_spec.rb +++ b/spec/impl/evaluator_operators_spec.rb @@ -126,8 +126,8 @@ expect(subject::user_value({ anonymous: false }, :anonymous)).to be false end - it "coerces non-boolean value of property anonymous to boolean" do - expect(subject::user_value({ anonymous: 3 }, :anonymous)).to be true + it "does not coerces non-boolean value of property anonymous" do + expect(subject::user_value({ anonymous: 3 }, :anonymous)).to eq 3 end it "gets string value of custom property" do From fd1d8371369a8ae92ed117dd374b70d9b4028f56 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 2 Jan 2020 13:27:22 -0800 Subject: [PATCH 139/182] whitespace --- lib/ldclient-rb/impl/evaluator_operators.rb | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/ldclient-rb/impl/evaluator_operators.rb b/lib/ldclient-rb/impl/evaluator_operators.rb index 56621790..62fc0927 100644 --- a/lib/ldclient-rb/impl/evaluator_operators.rb +++ b/lib/ldclient-rb/impl/evaluator_operators.rb @@ -148,7 +148,6 @@ def self.add_zero_version_component(v) m[0] + ".0" + v[m[0].length..-1] } end - end end end From 3c5289799ef4a0ff83422170fcc3985d3996f3a9 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 2 Jan 2020 16:36:03 -0800 Subject: [PATCH 140/182] break up Evaluator tests further --- spec/impl/evaluator_clause_spec.rb | 55 +++++ spec/impl/evaluator_rule_spec.rb | 85 ++++++++ spec/impl/evaluator_segment_spec.rb | 125 +++++++++++ spec/impl/evaluator_spec.rb | 313 +--------------------------- spec/impl/evaluator_spec_base.rb | 75 +++++++ 5 files changed, 342 insertions(+), 311 deletions(-) create mode 100644 spec/impl/evaluator_clause_spec.rb create mode 100644 spec/impl/evaluator_rule_spec.rb create mode 100644 spec/impl/evaluator_segment_spec.rb create mode 100644 spec/impl/evaluator_spec_base.rb diff --git a/spec/impl/evaluator_clause_spec.rb b/spec/impl/evaluator_clause_spec.rb new file mode 100644 index 00000000..a90a5499 --- /dev/null +++ b/spec/impl/evaluator_clause_spec.rb @@ -0,0 +1,55 @@ +require "spec_helper" +require "impl/evaluator_spec_base" + +module LaunchDarkly + module Impl + describe "Evaluator (clauses)", :evaluator_spec_base => true do + subject { Evaluator } + + it "can match built-in attribute" do + user = { key: 'x', name: 'Bob' } + clause = { attribute: 'name', op: 'in', values: ['Bob'] } + 
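As a quick reference for the coercion behavior that this patch settles on, user_value behaves roughly as follows (values taken from the accompanying evaluator_operators_spec examples):

    ops = LaunchDarkly::Impl::EvaluatorOperators
    ops.user_value({ email: 3 }, :email)              # => "3"  (built-in attributes are coerced to strings)
    ops.user_value({ anonymous: 3 }, :anonymous)      # => 3    (anonymous is passed through uncoerced)
    ops.user_value({ custom: { legs: 4 } }, :legs)    # => 4    (custom attributes keep their original type)
    ops.user_value({}, :name)                         # => nil  (missing attributes return nil)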
flag = boolean_flag_with_clauses([clause]) + expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be true + end + + it "can match custom attribute" do + user = { key: 'x', name: 'Bob', custom: { legs: 4 } } + clause = { attribute: 'legs', op: 'in', values: [4] } + flag = boolean_flag_with_clauses([clause]) + expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be true + end + + it "returns false for missing attribute" do + user = { key: 'x', name: 'Bob' } + clause = { attribute: 'legs', op: 'in', values: [4] } + flag = boolean_flag_with_clauses([clause]) + expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be false + end + + it "returns false for unknown operator" do + user = { key: 'x', name: 'Bob' } + clause = { attribute: 'name', op: 'unknown', values: [4] } + flag = boolean_flag_with_clauses([clause]) + expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be false + end + + it "does not stop evaluating rules after clause with unknown operator" do + user = { key: 'x', name: 'Bob' } + clause0 = { attribute: 'name', op: 'unknown', values: [4] } + rule0 = { clauses: [ clause0 ], variation: 1 } + clause1 = { attribute: 'name', op: 'in', values: ['Bob'] } + rule1 = { clauses: [ clause1 ], variation: 1 } + flag = boolean_flag_with_rules([rule0, rule1]) + expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be true + end + + it "can be negated" do + user = { key: 'x', name: 'Bob' } + clause = { attribute: 'name', op: 'in', values: ['Bob'], negate: true } + flag = boolean_flag_with_clauses([clause]) + expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be false + end + end + end +end diff --git a/spec/impl/evaluator_rule_spec.rb b/spec/impl/evaluator_rule_spec.rb new file mode 100644 index 00000000..ee2e315b --- /dev/null +++ b/spec/impl/evaluator_rule_spec.rb @@ -0,0 +1,85 @@ +require "spec_helper" +require "impl/evaluator_spec_base" + +module LaunchDarkly + module Impl + describe "Evaluator (rules)", :evaluator_spec_base => true do + subject { Evaluator } + + it "matches user from rules" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 1 } + flag = boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = EvaluationDetail.new(true, 1, + { kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'ruleid' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "returns an error if rule variation is too high" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 999 } + flag = boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "returns an error if rule variation is negative" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: -1 } + flag = boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "returns an error if rule has neither variation nor rollout" do + rule = { id: 'ruleid', 
clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }] } + flag = boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "returns an error if rule has a rollout with no variations" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], + rollout: { variations: [] } } + flag = boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail).to eq(detail) + expect(result.events).to eq(nil) + end + + it "coerces user key to a string for evaluation" do + clause = { attribute: 'key', op: 'in', values: ['999'] } + flag = boolean_flag_with_clauses([clause]) + user = { key: 999 } + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail.value).to eq(true) + end + + it "coerces secondary key to a string for evaluation" do + # We can't really verify that the rollout calculation works correctly, but we can at least + # make sure it doesn't error out if there's a non-string secondary value (ch35189) + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], + rollout: { salt: '', variations: [ { weight: 100000, variation: 1 } ] } } + flag = boolean_flag_with_rules([rule]) + user = { key: "userkey", secondary: 999 } + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail.reason).to eq({ kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'ruleid'}) + end + end + end +end diff --git a/spec/impl/evaluator_segment_spec.rb b/spec/impl/evaluator_segment_spec.rb new file mode 100644 index 00000000..64fb1bc7 --- /dev/null +++ b/spec/impl/evaluator_segment_spec.rb @@ -0,0 +1,125 @@ +require "spec_helper" +require "impl/evaluator_spec_base" + +module LaunchDarkly + module Impl + describe "Evaluator (segments)", :evaluator_spec_base => true do + subject { Evaluator } + + def test_segment_match(segment) + clause = make_segment_match_clause(segment) + flag = boolean_flag_with_clauses([clause]) + e = Evaluator.new(get_nothing, get_things({ segment[:key] => segment }), logger) + e.evaluate(flag, user, factory).detail.value + end + + it "retrieves segment from segment store for segmentMatch operator" do + segment = { + key: 'segkey', + included: [ 'userkey' ], + version: 1, + deleted: false + } + get_segment = get_things({ 'segkey' => segment }) + e = subject.new(get_nothing, get_segment, logger) + user = { key: 'userkey' } + clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } + flag = boolean_flag_with_clauses([clause]) + expect(e.evaluate(flag, user, factory).detail.value).to be true + end + + it "falls through with no errors if referenced segment is not found" do + e = subject.new(get_nothing, get_things({ 'segkey' => nil }), logger) + user = { key: 'userkey' } + clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } + flag = boolean_flag_with_clauses([clause]) + expect(e.evaluate(flag, user, factory).detail.value).to be false + end + + it 'explicitly includes user' do + segment = make_segment('segkey') + segment[:included] = [ user[:key] ] + expect(test_segment_match(segment)).to be true + end + + it 'explicitly excludes user' do + segment = make_segment('segkey') + 
segment[:excluded] = [ user[:key] ] + expect(test_segment_match(segment)).to be false + end + + it 'both includes and excludes user; include takes priority' do + segment = make_segment('segkey') + segment[:included] = [ user[:key] ] + segment[:excluded] = [ user[:key] ] + expect(test_segment_match(segment)).to be true + end + + it 'matches user by rule when weight is absent' do + segClause = make_user_matching_clause(user, :email) + segRule = { + clauses: [ segClause ] + } + segment = make_segment('segkey') + segment[:rules] = [ segRule ] + expect(test_segment_match(segment)).to be true + end + + it 'matches user by rule when weight is nil' do + segClause = make_user_matching_clause(user, :email) + segRule = { + clauses: [ segClause ], + weight: nil + } + segment = make_segment('segkey') + segment[:rules] = [ segRule ] + expect(test_segment_match(segment)).to be true + end + + it 'matches user with full rollout' do + segClause = make_user_matching_clause(user, :email) + segRule = { + clauses: [ segClause ], + weight: 100000 + } + segment = make_segment('segkey') + segment[:rules] = [ segRule ] + expect(test_segment_match(segment)).to be true + end + + it "doesn't match user with zero rollout" do + segClause = make_user_matching_clause(user, :email) + segRule = { + clauses: [ segClause ], + weight: 0 + } + segment = make_segment('segkey') + segment[:rules] = [ segRule ] + expect(test_segment_match(segment)).to be false + end + + it "matches user with multiple clauses" do + segClause1 = make_user_matching_clause(user, :email) + segClause2 = make_user_matching_clause(user, :name) + segRule = { + clauses: [ segClause1, segClause2 ] + } + segment = make_segment('segkey') + segment[:rules] = [ segRule ] + expect(test_segment_match(segment)).to be true + end + + it "doesn't match user with multiple clauses if a clause doesn't match" do + segClause1 = make_user_matching_clause(user, :email) + segClause2 = make_user_matching_clause(user, :name) + segClause2[:values] = [ 'wrong' ] + segRule = { + clauses: [ segClause1, segClause2 ] + } + segment = make_segment('segkey') + segment[:rules] = [ segRule ] + expect(test_segment_match(segment)).to be false + end + end + end +end diff --git a/spec/impl/evaluator_spec.rb b/spec/impl/evaluator_spec.rb index 04f99a22..556a69f6 100644 --- a/spec/impl/evaluator_spec.rb +++ b/spec/impl/evaluator_spec.rb @@ -1,45 +1,11 @@ require "spec_helper" +require "impl/evaluator_spec_base" module LaunchDarkly module Impl - describe "Evaluator" do + describe "Evaluator", :evaluator_spec_base => true do subject { Evaluator } - let(:factory) { EventFactory.new(false) } - - let(:user) { - { - key: "userkey", - email: "test@example.com", - name: "Bob" - } - } - - let(:logger) { ::Logger.new($stdout, level: ::Logger::FATAL) } - - def get_nothing - lambda { |key| raise "should not have requested #{key}" } - end - - def get_things(map) - lambda { |key| - raise "should not have requested #{key}" if !map.has_key?(key) - map[key] - } - end - - def basic_evaluator - subject.new(get_nothing, get_nothing, logger) - end - - def boolean_flag_with_rules(rules) - { key: 'feature', on: true, rules: rules, fallthrough: { variation: 0 }, variations: [ false, true ] } - end - - def boolean_flag_with_clauses(clauses) - boolean_flag_with_rules([{ id: 'ruleid', clauses: clauses, variation: 1 }]) - end - describe "evaluate" do it "returns off variation if flag is off" do flag = { @@ -319,281 +285,6 @@ def boolean_flag_with_clauses(clauses) expect(result.detail).to eq(detail) expect(result.events).to 
eq(nil) end - - it "matches user from rules" do - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 1 } - flag = boolean_flag_with_rules([rule]) - user = { key: 'userkey' } - detail = EvaluationDetail.new(true, 1, - { kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'ruleid' }) - result = basic_evaluator.evaluate(flag, user, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq(nil) - end - - it "returns an error if rule variation is too high" do - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 999 } - flag = boolean_flag_with_rules([rule]) - user = { key: 'userkey' } - detail = EvaluationDetail.new(nil, nil, - { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = basic_evaluator.evaluate(flag, user, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq(nil) - end - - it "returns an error if rule variation is negative" do - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: -1 } - flag = boolean_flag_with_rules([rule]) - user = { key: 'userkey' } - detail = EvaluationDetail.new(nil, nil, - { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = basic_evaluator.evaluate(flag, user, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq(nil) - end - - it "returns an error if rule has neither variation nor rollout" do - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }] } - flag = boolean_flag_with_rules([rule]) - user = { key: 'userkey' } - detail = EvaluationDetail.new(nil, nil, - { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = basic_evaluator.evaluate(flag, user, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq(nil) - end - - it "returns an error if rule has a rollout with no variations" do - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], - rollout: { variations: [] } } - flag = boolean_flag_with_rules([rule]) - user = { key: 'userkey' } - detail = EvaluationDetail.new(nil, nil, - { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = basic_evaluator.evaluate(flag, user, factory) - expect(result.detail).to eq(detail) - expect(result.events).to eq(nil) - end - - it "coerces user key to a string for evaluation" do - clause = { attribute: 'key', op: 'in', values: ['999'] } - flag = boolean_flag_with_clauses([clause]) - user = { key: 999 } - result = basic_evaluator.evaluate(flag, user, factory) - expect(result.detail.value).to eq(true) - end - - it "coerces secondary key to a string for evaluation" do - # We can't really verify that the rollout calculation works correctly, but we can at least - # make sure it doesn't error out if there's a non-string secondary value (ch35189) - rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], - rollout: { salt: '', variations: [ { weight: 100000, variation: 1 } ] } } - flag = boolean_flag_with_rules([rule]) - user = { key: "userkey", secondary: 999 } - result = basic_evaluator.evaluate(flag, user, factory) - expect(result.detail.reason).to eq({ kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'ruleid'}) - end - end - - describe "clause" do - it "can match built-in attribute" do - user = { key: 'x', name: 'Bob' } - clause = { attribute: 'name', op: 'in', values: ['Bob'] } - flag = boolean_flag_with_clauses([clause]) - expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be true - end - 
- it "can match custom attribute" do - user = { key: 'x', name: 'Bob', custom: { legs: 4 } } - clause = { attribute: 'legs', op: 'in', values: [4] } - flag = boolean_flag_with_clauses([clause]) - expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be true - end - - it "returns false for missing attribute" do - user = { key: 'x', name: 'Bob' } - clause = { attribute: 'legs', op: 'in', values: [4] } - flag = boolean_flag_with_clauses([clause]) - expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be false - end - - it "returns false for unknown operator" do - user = { key: 'x', name: 'Bob' } - clause = { attribute: 'name', op: 'unknown', values: [4] } - flag = boolean_flag_with_clauses([clause]) - expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be false - end - - it "does not stop evaluating rules after clause with unknown operator" do - user = { key: 'x', name: 'Bob' } - clause0 = { attribute: 'name', op: 'unknown', values: [4] } - rule0 = { clauses: [ clause0 ], variation: 1 } - clause1 = { attribute: 'name', op: 'in', values: ['Bob'] } - rule1 = { clauses: [ clause1 ], variation: 1 } - flag = boolean_flag_with_rules([rule0, rule1]) - expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be true - end - - it "can be negated" do - user = { key: 'x', name: 'Bob' } - clause = { attribute: 'name', op: 'in', values: ['Bob'], negate: true } - flag = boolean_flag_with_clauses([clause]) - expect(basic_evaluator.evaluate(flag, user, factory).detail.value).to be false - end - - it "retrieves segment from segment store for segmentMatch operator" do - segment = { - key: 'segkey', - included: [ 'userkey' ], - version: 1, - deleted: false - } - get_segment = get_things({ 'segkey' => segment }) - e = subject.new(get_nothing, get_segment, logger) - user = { key: 'userkey' } - clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } - flag = boolean_flag_with_clauses([clause]) - expect(e.evaluate(flag, user, factory).detail.value).to be true - end - - it "falls through with no errors if referenced segment is not found" do - e = subject.new(get_nothing, get_things({ 'segkey' => nil }), logger) - user = { key: 'userkey' } - clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } - flag = boolean_flag_with_clauses([clause]) - expect(e.evaluate(flag, user, factory).detail.value).to be false - end - - it "can be negated" do - user = { key: 'x', name: 'Bob' } - clause = { attribute: 'name', op: 'in', values: ['Bob'] } - flag = boolean_flag_with_clauses([clause]) - expect { - clause[:negate] = true - }.to change {basic_evaluator.evaluate(flag, user, factory).detail.value}.from(true).to(false) - end - end - - def make_segment(key) - { - key: key, - included: [], - excluded: [], - salt: 'abcdef', - version: 1 - } - end - - def make_segment_match_clause(segment) - { - op: :segmentMatch, - values: [ segment[:key] ], - negate: false - } - end - - def make_user_matching_clause(user, attr) - { - attribute: attr.to_s, - op: :in, - values: [ user[attr.to_sym] ], - negate: false - } - end - - describe 'segment matching' do - def test_segment_match(segment) - clause = make_segment_match_clause(segment) - flag = boolean_flag_with_clauses([clause]) - e = subject.new(get_nothing, get_things({ segment[:key] => segment }), logger) - e.evaluate(flag, user, factory).detail.value - end - - it 'explicitly includes user' do - segment = make_segment('segkey') - segment[:included] = [ user[:key] ] - expect(test_segment_match(segment)).to be 
true - end - - it 'explicitly excludes user' do - segment = make_segment('segkey') - segment[:excluded] = [ user[:key] ] - expect(test_segment_match(segment)).to be false - end - - it 'both includes and excludes user; include takes priority' do - segment = make_segment('segkey') - segment[:included] = [ user[:key] ] - segment[:excluded] = [ user[:key] ] - expect(test_segment_match(segment)).to be true - end - - it 'matches user by rule when weight is absent' do - segClause = make_user_matching_clause(user, :email) - segRule = { - clauses: [ segClause ] - } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(segment)).to be true - end - - it 'matches user by rule when weight is nil' do - segClause = make_user_matching_clause(user, :email) - segRule = { - clauses: [ segClause ], - weight: nil - } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(segment)).to be true - end - - it 'matches user with full rollout' do - segClause = make_user_matching_clause(user, :email) - segRule = { - clauses: [ segClause ], - weight: 100000 - } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(segment)).to be true - end - - it "doesn't match user with zero rollout" do - segClause = make_user_matching_clause(user, :email) - segRule = { - clauses: [ segClause ], - weight: 0 - } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(segment)).to be false - end - - it "matches user with multiple clauses" do - segClause1 = make_user_matching_clause(user, :email) - segClause2 = make_user_matching_clause(user, :name) - segRule = { - clauses: [ segClause1, segClause2 ] - } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(segment)).to be true - end - - it "doesn't match user with multiple clauses if a clause doesn't match" do - segClause1 = make_user_matching_clause(user, :email) - segClause2 = make_user_matching_clause(user, :name) - segClause2[:values] = [ 'wrong' ] - segRule = { - clauses: [ segClause1, segClause2 ] - } - segment = make_segment('segkey') - segment[:rules] = [ segRule ] - expect(test_segment_match(segment)).to be false - end end end end diff --git a/spec/impl/evaluator_spec_base.rb b/spec/impl/evaluator_spec_base.rb new file mode 100644 index 00000000..fa8b86c3 --- /dev/null +++ b/spec/impl/evaluator_spec_base.rb @@ -0,0 +1,75 @@ +require "spec_helper" + +module LaunchDarkly + module Impl + module EvaluatorSpecBase + def factory + EventFactory.new(false) + end + + def user + { + key: "userkey", + email: "test@example.com", + name: "Bob" + } + end + + def logger + ::Logger.new($stdout, level: ::Logger::FATAL) + end + + def get_nothing + lambda { |key| raise "should not have requested #{key}" } + end + + def get_things(map) + lambda { |key| + raise "should not have requested #{key}" if !map.has_key?(key) + map[key] + } + end + + def basic_evaluator + subject.new(get_nothing, get_nothing, logger) + end + + def boolean_flag_with_rules(rules) + { key: 'feature', on: true, rules: rules, fallthrough: { variation: 0 }, variations: [ false, true ] } + end + + def boolean_flag_with_clauses(clauses) + boolean_flag_with_rules([{ id: 'ruleid', clauses: clauses, variation: 1 }]) + end + + def make_user_matching_clause(user, attr) + { + attribute: attr.to_s, + op: :in, + values: [ user[attr.to_sym] ], + negate: false + } + end + + def make_segment(key) + { + key: key, + included: [], + excluded: [], 
+ salt: 'abcdef', + version: 1 + } + end + + def make_segment_match_clause(segment) + { + op: :segmentMatch, + values: [ segment[:key] ], + negate: false + } + end + end + + RSpec.configure { |c| c.include EvaluatorSpecBase, :evaluator_spec_base => true } + end +end From d06833619ae59c1fc8ce869050afe7703da8cc49 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 2 Jan 2020 16:41:46 -0800 Subject: [PATCH 141/182] make EvaluationReason an immutable class --- lib/ldclient-rb/evaluation_detail.rb | 275 ++++++++++++++++++++++++--- lib/ldclient-rb/impl/evaluator.rb | 22 +-- lib/ldclient-rb/impl/model/flag.rb | 26 +++ lib/ldclient-rb/ldclient.rb | 13 +- spec/evaluation_detail_spec.rb | 135 +++++++++++++ spec/impl/evaluator_rule_spec.rb | 13 +- spec/impl/evaluator_spec.rb | 32 ++-- spec/ldclient_spec.rb | 18 +- 8 files changed, 452 insertions(+), 82 deletions(-) create mode 100644 lib/ldclient-rb/impl/model/flag.rb create mode 100644 spec/evaluation_detail_spec.rb diff --git a/lib/ldclient-rb/evaluation_detail.rb b/lib/ldclient-rb/evaluation_detail.rb index 9db9f0fe..bccaf133 100644 --- a/lib/ldclient-rb/evaluation_detail.rb +++ b/lib/ldclient-rb/evaluation_detail.rb @@ -3,7 +3,16 @@ module LaunchDarkly # An object returned by {LDClient#variation_detail}, combining the result of a flag evaluation with # an explanation of how it was calculated. class EvaluationDetail + # Creates a new instance. + # + # @param value the result value of the flag evaluation; may be of any type + # @param variation_index [int|nil] the index of the value within the flag's list of variations, or + # `nil` if the application default value was returned + # @param reason [EvaluationReason] an object describing the main factor that influenced the result + # @raise [ArgumentError] if `variation_index` or `reason` is not of the correct type def initialize(value, variation_index, reason) + raise ArgumentError.new("variation_index must be a number") if !variation_index.nil? && !(variation_index.is_a? Numeric) + raise ArgumentError.new("reason must be an EvaluationReason") if !(reason.is_a? EvaluationReason) @value = value @variation_index = variation_index @reason = reason @@ -29,37 +38,7 @@ def initialize(value, variation_index, reason) # # An object describing the main factor that influenced the flag evaluation value. # - # This object is currently represented as a Hash, which may have the following keys: - # - # `:kind`: The general category of reason. Possible values: - # - # * `'OFF'`: the flag was off and therefore returned its configured off value - # * `'FALLTHROUGH'`: the flag was on but the user did not match any targets or rules - # * `'TARGET_MATCH'`: the user key was specifically targeted for this flag - # * `'RULE_MATCH'`: the user matched one of the flag's rules - # * `'PREREQUISITE_FAILED`': the flag was considered off because it had at least one - # prerequisite flag that either was off or did not return the desired variation - # * `'ERROR'`: the flag could not be evaluated, so the default value was returned - # - # `:ruleIndex`: If the kind was `RULE_MATCH`, this is the positional index of the - # matched rule (0 for the first rule). - # - # `:ruleId`: If the kind was `RULE_MATCH`, this is the rule's unique identifier. - # - # `:prerequisiteKey`: If the kind was `PREREQUISITE_FAILED`, this is the flag key of - # the prerequisite flag that failed. 
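For illustration, the stricter EvaluationDetail constructor introduced in this patch behaves roughly like this (a sketch assuming the gem is loaded):

    detail = LaunchDarkly::EvaluationDetail.new("green", 1, LaunchDarkly::EvaluationReason::fallthrough)
    detail.variation_index   # => 1

    # A plain hash is no longer accepted as the reason:
    LaunchDarkly::EvaluationDetail.new("green", 1, { kind: "FALLTHROUGH" })
    # => ArgumentError: reason must be an EvaluationReason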
- # - # `:errorKind`: If the kind was `ERROR`, this indicates the type of error: - # - # * `'CLIENT_NOT_READY'`: the caller tried to evaluate a flag before the client had - # successfully initialized - # * `'FLAG_NOT_FOUND'`: the caller provided a flag key that did not match any known flag - # * `'MALFORMED_FLAG'`: there was an internal inconsistency in the flag data, e.g. a - # rule specified a nonexistent variation - # * `'USER_NOT_SPECIFIED'`: the user object or user key was not provied - # * `'EXCEPTION'`: an unexpected exception stopped flag evaluation - # - # @return [Hash] + # @return [EvaluationReason] # attr_reader :reason @@ -77,4 +56,238 @@ def ==(other) @value == other.value && @variation_index == other.variation_index && @reason == other.reason end end + + # Describes the reason that a flag evaluation produced a particular value. This is returned by + # methods such as {LDClient#variation_detail} as the `reason` property of an {EvaluationDetail}. + # + # The `kind` property is always defined, but other properties will have non-nil values only for + # certain values of `kind`. All properties are immutable. + # + # There is a standard JSON representation of evaluation reasons when they appear in analytics events. + # Use `as_json` or `to_json` to convert to this representation. + # + # Use factory methods such as {EvaluationReason#off} to obtain instances of this class. + class EvaluationReason + # Value for {#kind} indicating that the flag was off and therefore returned its configured off value. + OFF = :OFF + + # Value for {#kind} indicating that the flag was on but the user did not match any targets or rules. + FALLTHROUGH = :FALLTHROUGH + + # Value for {#kind} indicating that the user key was specifically targeted for this flag. + TARGET_MATCH = :TARGET_MATCH + + # Value for {#kind} indicating that the user matched one of the flag's rules. + RULE_MATCH = :RULE_MATCH + + # Value for {#kind} indicating that the flag was considered off because it had at least one + # prerequisite flag that either was off or did not return the desired variation. + PREREQUISITE_FAILED = :PREREQUISITE_FAILED + + # Value for {#kind} indicating that the flag could not be evaluated, e.g. because it does not exist + # or due to an unexpected error. In this case the result value will be the application default value + # that the caller passed to the client. Check {#error_kind} for more details on the problem. + ERROR = :ERROR + + # Value for {#error_kind} indicating that the caller tried to evaluate a flag before the client had + # successfully initialized. + ERROR_CLIENT_NOT_READY = :CLIENT_NOT_READY + + # Value for {#error_kind} indicating that the caller provided a flag key that did not match any known flag. + ERROR_FLAG_NOT_FOUND = :FLAG_NOT_FOUND + + # Value for {#error_kind} indicating that there was an internal inconsistency in the flag data, e.g. + # a rule specified a nonexistent variation. An error message will always be logged in this case. + ERROR_MALFORMED_FLAG = :MALFORMED_FLAG + + # Value for {#error_kind} indicating that the caller passed `nil` for the user parameter, or the + # user lacked a key. + ERROR_USER_NOT_SPECIFIED = :USER_NOT_SPECIFIED + + # Value for {#error_kind} indicating that an unexpected exception stopped flag evaluation. An error + # message will always be logged in this case. + ERROR_EXCEPTION = :EXCEPTION + + # Indicates the general category of the reason. Will always be one of the class constants such + # as {#OFF}. 
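As a usage sketch for these constants, an application inspecting a result from variation_detail might do something like the following ("client" and "user" are assumed to exist; they are not part of this patch):

    detail = client.variation_detail("my-flag-key", user, false)
    case detail.reason.kind
    when LaunchDarkly::EvaluationReason::RULE_MATCH
      puts "matched rule #{detail.reason.rule_index} (#{detail.reason.rule_id})"
    when LaunchDarkly::EvaluationReason::ERROR
      puts "evaluation failed: #{detail.reason.error_kind}"
    end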
+ attr_reader :kind + + # The index of the rule that was matched (0 for the first rule in the feature flag). If + # {#kind} is not {#RULE_MATCH}, this will be `nil`. + attr_reader :rule_index + + # A unique string identifier for the matched rule, which will not change if other rules are added + # or deleted. If {#kind} is not {#RULE_MATCH}, this will be `nil`. + attr_reader :rule_id + + # The key of the prerequisite flag that did not return the desired variation. If {#kind} is not + # {#PREREQUISITE_FAILED}, this will be `nil`. + attr_reader :prerequisite_key + + # A value indicating the general category of error. This should be one of the class constants such + # as {#ERROR_FLAG_NOT_FOUND}. If {#kind} is not {#ERROR}, it will be `nil`. + attr_reader :error_kind + + # Returns an instance whose {#kind} is {#OFF}. + # @return [EvaluationReason] + def self.off + @@off + end + + # Returns an instance whose {#kind} is {#FALLTHROUGH}. + # @return [EvaluationReason] + def self.fallthrough + @@fallthrough + end + + # Returns an instance whose {#kind} is {#TARGET_MATCH}. + # @return [EvaluationReason] + def self.target_match + @@target_match + end + + # Returns an instance whose {#kind} is {#RULE_MATCH}. + # + # @param rule_index [Number] the index of the rule that was matched (0 for the first rule in + # the feature flag) + # @param rule_id [String] unique string identifier for the matched rule + # @return [EvaluationReason] + # @raise [ArgumentError] if `rule_index` is not a number or `rule_id` is not a string + def self.rule_match(rule_index, rule_id) + raise ArgumentError.new("rule_index must be a number") if !(rule_index.is_a? Numeric) + raise ArgumentError.new("rule_id must be a string") if !rule_id.nil? && !(rule_id.is_a? String) # in test data, ID could be nil + new(:RULE_MATCH, rule_index, rule_id, nil, nil) + end + + # Returns an instance whose {#kind} is {#PREREQUISITE_FAILED}. + # + # @param prerequisite_key [String] key of the prerequisite flag that did not return the desired variation + # @return [EvaluationReason] + # @raise [ArgumentError] if `prerequisite_key` is nil or not a string + def self.prerequisite_failed(prerequisite_key) + raise ArgumentError.new("prerequisite_key must be a string") if !(prerequisite_key.is_a? String) + new(:PREREQUISITE_FAILED, nil, nil, prerequisite_key, nil) + end + + # Returns an instance whose {#kind} is {#ERROR}. + # + # @param error_kind [Symbol] value indicating the general category of error + # @return [EvaluationReason] + # @raise [ArgumentError] if `error_kind` is not a symbol + def self.error(error_kind) + raise ArgumentError.new("error_kind must be a symbol") if !(error_kind.is_a? Symbol) + e = @@error_instances[error_kind] + e.nil? ? make_error(error_kind) : e + end + + def ==(other) + if other.is_a? EvaluationReason + @kind == other.kind && @rule_index == other.rule_index && @rule_id == other.rule_id && + @prerequisite_key == other.prerequisite_key && @error_kind == other.error_kind + elsif other.is_a? Hash + @kind.to_s == other[:kind] && @rule_index == other[:ruleIndex] && @rule_id == other[:ruleId] && + @prerequisite_key == other[:prerequisiteKey] && + (other[:errorKind] == @error_kind.nil? ? nil : @error_kind.to_s) + end + end + + # Equivalent to {#inspect}. + # @return [String] + def to_s + inspect + end + + # Returns a concise string representation of the reason. Examples: `"FALLTHROUGH"`, + # `"ERROR(FLAG_NOT_FOUND)"`. The exact syntax is not guaranteed to remain the same; this is meant + # for debugging. 
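For illustration, the factory methods and string representations behave as follows (consistent with the inspect and [] implementations shown below):

    reason = LaunchDarkly::EvaluationReason::rule_match(0, "ruleid")
    reason.kind        # => :RULE_MATCH
    reason.rule_index  # => 0
    reason.inspect     # => "RULE_MATCH(0,ruleid)"
    reason[:kind]      # => "RULE_MATCH"  (hash-style access matches the JSON event format)

    err = LaunchDarkly::EvaluationReason::error(LaunchDarkly::EvaluationReason::ERROR_FLAG_NOT_FOUND)
    err.inspect        # => "ERROR(FLAG_NOT_FOUND)"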
+ # @return [String] + def inspect + case @kind + when :RULE_MATCH + "RULE_MATCH(#{@rule_index},#{@rule_id})" + when :PREREQUISITE_FAILED + "PREREQUISITE_FAILED(#{@prerequisite_key})" + when :ERROR + "ERROR(#{@error_kind})" + else + @kind.to_s + end + end + + # Returns a hash that can be used as a JSON representation of the reason, in the format used + # in LaunchDarkly analytics events. + # @return [Hash] + def as_json(*) # parameter is unused, but may be passed if we're using the json gem + # Note that this implementation is somewhat inefficient; it allocates a new hash every time. + # However, in normal usage the SDK only serializes reasons if 1. full event tracking is + # enabled for a flag and the application called variation_detail, or 2. experimentation is + # enabled for an evaluation. We can't reuse these hashes because an application could call + # as_json and then modify the result. + case @kind + when :RULE_MATCH + { kind: @kind, ruleIndex: @rule_index, ruleId: @rule_id } + when :PREREQUISITE_FAILED + { kind: @kind, prerequisiteKey: @prerequisite_key } + when :ERROR + { kind: @kind, errorKind: @error_kind } + else + { kind: @kind } + end + end + + # Same as {#as_json}, but converts the JSON structure into a string. + # @return [String] + def to_json(*a) + as_json.to_json(a) + end + + # Allows this object to be treated as a hash corresponding to its JSON representation. For + # instance, if `reason.kind` is {#RULE_MATCH}, then `reason[:kind]` will be `"RULE_MATCH"` and + # `reason[:ruleIndex]` will be equal to `reason.rule_index`. + def [](key) + case key + when :kind + @kind.to_s + when :ruleIndex + @rule_index + when :ruleId + @rule_id + when :prerequisiteKey + @prerequisite_key + when :errorKind + @error_kind.nil? ? nil : @error_kind.to_s + else + nil + end + end + + private + + def initialize(kind, rule_index, rule_id, prerequisite_key, error_kind) + @kind = kind.to_sym + @rule_index = rule_index + @rule_id = rule_id + @rule_id.freeze if !rule_id.nil? + @prerequisite_key = prerequisite_key + @prerequisite_key.freeze if !prerequisite_key.nil? + @error_kind = error_kind + end + + private_class_method :new + + def self.make_error(error_kind) + new(:ERROR, nil, nil, nil, error_kind) + end + + @@fallthrough = new(:FALLTHROUGH, nil, nil, nil, nil) + @@off = new(:OFF, nil, nil, nil, nil) + @@target_match = new(:TARGET_MATCH, nil, nil, nil, nil) + @@error_instances = { + ERROR_CLIENT_NOT_READY => make_error(ERROR_CLIENT_NOT_READY), + ERROR_FLAG_NOT_FOUND => make_error(ERROR_FLAG_NOT_FOUND), + ERROR_MALFORMED_FLAG => make_error(ERROR_MALFORMED_FLAG), + ERROR_USER_NOT_SPECIFIED => make_error(ERROR_USER_NOT_SPECIFIED), + ERROR_EXCEPTION => make_error(ERROR_EXCEPTION) + } + end end diff --git a/lib/ldclient-rb/impl/evaluator.rb b/lib/ldclient-rb/impl/evaluator.rb index abcde944..0bc78552 100644 --- a/lib/ldclient-rb/impl/evaluator.rb +++ b/lib/ldclient-rb/impl/evaluator.rb @@ -29,7 +29,7 @@ def initialize(get_flag, get_segment, logger) # Helper function used internally to construct an EvaluationDetail for an error result. def self.error_result(errorKind, value = nil) - EvaluationDetail.new(value, nil, { kind: 'ERROR', errorKind: errorKind }) + EvaluationDetail.new(value, nil, EvaluationReason.error(errorKind)) end # The client's entry point for evaluating a flag. The returned `EvalResult` contains the evaluation result and @@ -43,7 +43,7 @@ def self.error_result(errorKind, value = nil) # @return [EvalResult] the evaluation result def evaluate(flag, user, event_factory) if user.nil? 
|| user[:key].nil? - return EvalResult.new(Evaluator.error_result('USER_NOT_SPECIFIED'), []) + return EvalResult.new(Evaluator.error_result(EvaluationReason::ERROR_USER_NOT_SPECIFIED), []) end # If the flag doesn't have any prerequisites (which most flags don't) then it cannot generate any feature @@ -62,7 +62,7 @@ def evaluate(flag, user, event_factory) def eval_internal(flag, user, events, event_factory) if !flag[:on] - return get_off_value(flag, { kind: 'OFF' }) + return get_off_value(flag, EvaluationReason::off) end prereq_failure_reason = check_prerequisites(flag, user, events, event_factory) @@ -74,7 +74,7 @@ def eval_internal(flag, user, events, event_factory) (flag[:targets] || []).each do |target| (target[:values] || []).each do |value| if value == user[:key] - return get_variation(flag, target[:variation], { kind: 'TARGET_MATCH' }) + return get_variation(flag, target[:variation], EvaluationReason::target_match) end end end @@ -84,18 +84,16 @@ def eval_internal(flag, user, events, event_factory) rules.each_index do |i| rule = rules[i] if rule_match_user(rule, user) - return get_value_for_variation_or_rollout(flag, rule, user, - { kind: 'RULE_MATCH', ruleIndex: i, ruleId: rule[:id] }) + return get_value_for_variation_or_rollout(flag, rule, user, EvaluationReason::rule_match(i, rule[:id])) end end # Check the fallthrough rule if !flag[:fallthrough].nil? - return get_value_for_variation_or_rollout(flag, flag[:fallthrough], user, - { kind: 'FALLTHROUGH' }) + return get_value_for_variation_or_rollout(flag, flag[:fallthrough], user, EvaluationReason::fallthrough) end - return EvaluationDetail.new(nil, nil, { kind: 'FALLTHROUGH' }) + return EvaluationDetail.new(nil, nil, EvaluationReason::fallthrough) end def check_prerequisites(flag, user, events, event_factory) @@ -123,7 +121,7 @@ def check_prerequisites(flag, user, events, event_factory) end end if !prereq_ok - return { kind: 'PREREQUISITE_FAILED', prerequisiteKey: prereq_key } + return EvaluationReason::prerequisite_failed(prereq_key) end end nil @@ -219,7 +217,7 @@ def segment_rule_match_user(rule, user, segment_key, salt) def get_variation(flag, index, reason) if index < 0 || index >= flag[:variations].length @logger.error("[LDClient] Data inconsistency in feature flag \"#{flag[:key]}\": invalid variation index") - return Evaluator.error_result('MALFORMED_FLAG') + return Evaluator.error_result(EvaluationReason::ERROR_MALFORMED_FLAG) end EvaluationDetail.new(flag[:variations][index], index, reason) end @@ -235,7 +233,7 @@ def get_value_for_variation_or_rollout(flag, vr, user, reason) index = variation_index_for_user(flag, vr, user) if index.nil? @logger.error("[LDClient] Data inconsistency in feature flag \"#{flag[:key]}\": variation/rollout object with no variation or rollout") - return Evaluator.error_result('MALFORMED_FLAG') + return Evaluator.error_result(EvaluationReason::ERROR_MALFORMED_FLAG) end return get_variation(flag, index, reason) end diff --git a/lib/ldclient-rb/impl/model/flag.rb b/lib/ldclient-rb/impl/model/flag.rb new file mode 100644 index 00000000..a4de3d17 --- /dev/null +++ b/lib/ldclient-rb/impl/model/flag.rb @@ -0,0 +1,26 @@ + +module LaunchDarkly + module Impl + module Model + # Called after we have deserialized a flag from JSON (because we received it from LaunchDarkly, or + # read it from a persistent data store). Generates immutable instances of every parameterized + # evaluation reason that could be generated by this flag, so we can avoid creating new reason + # instances during evaluations. 
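As a concrete illustration of the precomputation described above (a sketch, not part of the diff), a deserialized flag hash ends up carrying ready-made reason objects that the evaluator can reuse instead of allocating new ones on every evaluation:

flag = {
  key: "f", on: true,
  prerequisites: [{ key: "other-flag", variation: 0 }],
  rules: [{ id: "r1", clauses: [], variation: 0 }]
}
# After the preprocessing method defined just below has run on this flag:
#   flag[:prerequisites][0][:_reason].inspect   # => "PREREQUISITE_FAILED(other-flag)"
#   flag[:rules][0][:_reason].inspect           # => "RULE_MATCH(0,r1)"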
+ def preprocess_flag_after_deserializing(flag) + prereqs = flag[:prerequisites] + if !prereqs.nil? + prereqs.each do |prereq| + prereq[:_reason] = EvaluationReason::prerequisite_failed(prereq[:key]) + end + end + rules = flag[:rules] + if !rules.nil? + rules.each_index do |i| + rule = rules[i] + rule[:_reason] = EvaluationReason::rule_match(i, rule[:id]) + end + end + end + end + end +end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 8b22feca..35c1bc41 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -319,7 +319,8 @@ def all_flags_state(user, options={}) details_only_if_tracked) rescue => exn Util.log_exception(@config.logger, "Error evaluating flag \"#{k}\" in all_flags_state", exn) - state.add_flag(f, nil, nil, with_reasons ? { kind: 'ERROR', errorKind: 'EXCEPTION' } : nil, details_only_if_tracked) + state.add_flag(f, nil, nil, with_reasons ? EvaluationReason::error(EvaluationReason::ERROR_EXCEPTION) : nil, + details_only_if_tracked) end end @@ -356,7 +357,7 @@ def create_default_data_source(sdk_key, config) # @return [EvaluationDetail] def evaluate_internal(key, user, default, event_factory) if @config.offline? - return Evaluator.error_result('CLIENT_NOT_READY', default) + return Evaluator.error_result(EvaluationReason::ERROR_CLIENT_NOT_READY, default) end if !initialized? @@ -364,7 +365,7 @@ def evaluate_internal(key, user, default, event_factory) @config.logger.warn { "[LDClient] Client has not finished initializing; using last known values from feature store" } else @config.logger.error { "[LDClient] Client has not finished initializing; feature store unavailable, returning default value" } - detail = Evaluator.error_result('CLIENT_NOT_READY', default) + detail = Evaluator.error_result(EvaluationReason::ERROR_CLIENT_NOT_READY, default) @event_processor.add_event(event_factory.new_unknown_flag_event(key, user, default, detail.reason)) return detail end @@ -374,14 +375,14 @@ def evaluate_internal(key, user, default, event_factory) if feature.nil? @config.logger.info { "[LDClient] Unknown feature flag \"#{key}\". 
Returning default value" } - detail = Evaluator.error_result('FLAG_NOT_FOUND', default) + detail = Evaluator.error_result(EvaluationReason::ERROR_FLAG_NOT_FOUND, default) @event_processor.add_event(event_factory.new_unknown_flag_event(key, user, default, detail.reason)) return detail end unless user @config.logger.error { "[LDClient] Must specify user" } - detail = Evaluator.error_result('USER_NOT_SPECIFIED', default) + detail = Evaluator.error_result(EvaluationReason::ERROR_USER_NOT_SPECIFIED, default) @event_processor.add_event(event_factory.new_default_event(feature, user, default, detail.reason)) return detail end @@ -401,7 +402,7 @@ def evaluate_internal(key, user, default, event_factory) return detail rescue => exn Util.log_exception(@config.logger, "Error evaluating feature flag \"#{key}\"", exn) - detail = Evaluator.error_result('EXCEPTION', default) + detail = Evaluator.error_result(EvaluationReason::ERROR_EXCEPTION, default) @event_processor.add_event(event_factory.new_default_event(feature, user, default, detail.reason)) return detail end diff --git a/spec/evaluation_detail_spec.rb b/spec/evaluation_detail_spec.rb new file mode 100644 index 00000000..6c3c713e --- /dev/null +++ b/spec/evaluation_detail_spec.rb @@ -0,0 +1,135 @@ +require "spec_helper" + +module LaunchDarkly + describe "EvaluationDetail" do + subject { EvaluationDetail } + + it "sets properties" do + expect(EvaluationDetail.new("x", 0, EvaluationReason::off).value).to eq "x" + expect(EvaluationDetail.new("x", 0, EvaluationReason::off).variation_index).to eq 0 + expect(EvaluationDetail.new("x", 0, EvaluationReason::off).reason).to eq EvaluationReason::off + end + + it "checks parameter types" do + expect { EvaluationDetail.new(nil, nil, EvaluationReason::off) }.not_to raise_error + expect { EvaluationDetail.new(nil, 0, EvaluationReason::off) }.not_to raise_error + expect { EvaluationDetail.new(nil, "x", EvaluationReason::off) }.to raise_error(ArgumentError) + expect { EvaluationDetail.new(nil, 0, { kind: "OFF" }) }.to raise_error(ArgumentError) + expect { EvaluationDetail.new(nil, 0, nil) }.to raise_error(ArgumentError) + end + + it "equality test" do + expect(EvaluationDetail.new("x", 0, EvaluationReason::off)).to eq EvaluationDetail.new("x", 0, EvaluationReason::off) + expect(EvaluationDetail.new("x", 0, EvaluationReason::off)).not_to eq EvaluationDetail.new("y", 0, EvaluationReason::off) + expect(EvaluationDetail.new("x", 0, EvaluationReason::off)).not_to eq EvaluationDetail.new("x", 1, EvaluationReason::off) + expect(EvaluationDetail.new("x", 0, EvaluationReason::off)).not_to eq EvaluationDetail.new("x", 0, EvaluationReason::fallthrough) + end + end + + describe "EvaluationReason" do + subject { EvaluationReason } + + values = [ + [ EvaluationReason::off, EvaluationReason::OFF, { "kind" => "OFF" }, "OFF", nil ], + [ EvaluationReason::fallthrough, EvaluationReason::FALLTHROUGH, + { "kind" => "FALLTHROUGH" }, "FALLTHROUGH", nil ], + [ EvaluationReason::target_match, EvaluationReason::TARGET_MATCH, + { "kind" => "TARGET_MATCH" }, "TARGET_MATCH", nil ], + [ EvaluationReason::rule_match(1, "x"), EvaluationReason::RULE_MATCH, + { "kind" => "RULE_MATCH", "ruleIndex" => 1, "ruleId" => "x" }, "RULE_MATCH(1,x)", + [ EvaluationReason::rule_match(2, "x"), EvaluationReason::rule_match(1, "y") ] ], + [ EvaluationReason::prerequisite_failed("x"), EvaluationReason::PREREQUISITE_FAILED, + { "kind" => "PREREQUISITE_FAILED", "prerequisiteKey" => "x" }, "PREREQUISITE_FAILED(x)" ], + [ 
EvaluationReason::error(EvaluationReason::ERROR_FLAG_NOT_FOUND), EvaluationReason::ERROR, + { "kind" => "ERROR", "errorKind" => "FLAG_NOT_FOUND" }, "ERROR(FLAG_NOT_FOUND)" ] + ] + values.each_index do |i| + params = values[i] + reason = params[0] + kind = params[1] + json_rep = params[2] + brief_str = params[3] + unequal_values = params[4] + + describe "reason #{reason.kind}" do + it "has correct kind" do + expect(reason.kind).to eq kind + end + + it "equality to self" do + expect(reason).to eq reason + end + + it "inequality to others" do + values.each_index do |j| + if i != j + expect(reason).not_to eq values[j][0] + end + end + if !unequal_values.nil? + unequal_values.each do |v| + expect(reason).not_to eq v + end + end + end + + it "JSON representation" do + expect(JSON.parse(reason.as_json.to_json)).to eq json_rep + expect(JSON.parse(reason.to_json)).to eq json_rep + end + + it "brief representation" do + expect(reason.inspect).to eq brief_str + expect(reason.to_s).to eq brief_str + end + end + end + + it "reuses singleton reasons" do + expect(EvaluationReason::off).to be EvaluationReason::off + expect(EvaluationReason::fallthrough).to be EvaluationReason::fallthrough + expect(EvaluationReason::target_match).to be EvaluationReason::target_match + expect(EvaluationReason::rule_match(1, 'x')).not_to be EvaluationReason::rule_match(1, 'x') + expect(EvaluationReason::prerequisite_failed('x')).not_to be EvaluationReason::prerequisite_failed('x') + errors = [ EvaluationReason::ERROR_CLIENT_NOT_READY, EvaluationReason::ERROR_FLAG_NOT_FOUND, + EvaluationReason::ERROR_MALFORMED_FLAG, EvaluationReason::ERROR_USER_NOT_SPECIFIED, EvaluationReason::ERROR_EXCEPTION ] + errors.each do |e| + expect(EvaluationReason::error(e)).to be EvaluationReason::error(e) + end + end + + it "supports [] with JSON property names" do + expect(EvaluationReason::off[:kind]).to eq "OFF" + expect(EvaluationReason::off[:ruleIndex]).to be nil + expect(EvaluationReason::off[:ruleId]).to be nil + expect(EvaluationReason::off[:prerequisiteKey]).to be nil + expect(EvaluationReason::off[:errorKind]).to be nil + expect(EvaluationReason::rule_match(1, "x")[:ruleIndex]).to eq 1 + expect(EvaluationReason::rule_match(1, "x")[:ruleId]).to eq "x" + expect(EvaluationReason::prerequisite_failed("x")[:prerequisiteKey]).to eq "x" + expect(EvaluationReason::error(EvaluationReason::ERROR_FLAG_NOT_FOUND)[:errorKind]).to eq "FLAG_NOT_FOUND" + end + + it "freezes string properties" do + rm = EvaluationReason::rule_match(1, "x") + expect { rm.rule_id.upcase! }.to raise_error(FrozenError) + pf = EvaluationReason::prerequisite_failed("x") + expect { pf.prerequisite_key.upcase! 
}.to raise_error(FrozenError) + end + + it "checks parameter types" do + expect { EvaluationReason::rule_match(nil, "x") }.to raise_error(ArgumentError) + expect { EvaluationReason::rule_match(true, "x") }.to raise_error(ArgumentError) + expect { EvaluationReason::rule_match(1, nil) }.not_to raise_error # we allow nil rule_id for backward compatibility + expect { EvaluationReason::rule_match(1, 9) }.to raise_error(ArgumentError) + expect { EvaluationReason::prerequisite_failed(nil) }.to raise_error(ArgumentError) + expect { EvaluationReason::prerequisite_failed(9) }.to raise_error(ArgumentError) + expect { EvaluationReason::error(nil) }.to raise_error(ArgumentError) + expect { EvaluationReason::error(9) }.to raise_error(ArgumentError) + end + + it "does not allow direct access to constructor" do + expect { EvaluationReason.new(:off, nil, nil, nil, nil) }.to raise_error(NoMethodError) + end + end +end diff --git a/spec/impl/evaluator_rule_spec.rb b/spec/impl/evaluator_rule_spec.rb index ee2e315b..bcbbcbdd 100644 --- a/spec/impl/evaluator_rule_spec.rb +++ b/spec/impl/evaluator_rule_spec.rb @@ -10,8 +10,7 @@ module Impl rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 1 } flag = boolean_flag_with_rules([rule]) user = { key: 'userkey' } - detail = EvaluationDetail.new(true, 1, - { kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'ruleid' }) + detail = EvaluationDetail.new(true, 1, EvaluationReason::rule_match(0, 'ruleid')) result = basic_evaluator.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(nil) @@ -22,7 +21,7 @@ module Impl flag = boolean_flag_with_rules([rule]) user = { key: 'userkey' } detail = EvaluationDetail.new(nil, nil, - { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) result = basic_evaluator.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(nil) @@ -33,7 +32,7 @@ module Impl flag = boolean_flag_with_rules([rule]) user = { key: 'userkey' } detail = EvaluationDetail.new(nil, nil, - { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) result = basic_evaluator.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(nil) @@ -44,7 +43,7 @@ module Impl flag = boolean_flag_with_rules([rule]) user = { key: 'userkey' } detail = EvaluationDetail.new(nil, nil, - { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) result = basic_evaluator.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(nil) @@ -56,7 +55,7 @@ module Impl flag = boolean_flag_with_rules([rule]) user = { key: 'userkey' } detail = EvaluationDetail.new(nil, nil, - { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) result = basic_evaluator.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(nil) @@ -78,7 +77,7 @@ module Impl flag = boolean_flag_with_rules([rule]) user = { key: "userkey", secondary: 999 } result = basic_evaluator.evaluate(flag, user, factory) - expect(result.detail.reason).to eq({ kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'ruleid'}) + expect(result.detail.reason).to eq(EvaluationReason::rule_match(0, 'ruleid')) end end end diff --git a/spec/impl/evaluator_spec.rb b/spec/impl/evaluator_spec.rb index 
556a69f6..6fca0315 100644 --- a/spec/impl/evaluator_spec.rb +++ b/spec/impl/evaluator_spec.rb @@ -16,7 +16,7 @@ module Impl variations: ['a', 'b', 'c'] } user = { key: 'x' } - detail = EvaluationDetail.new('b', 1, { kind: 'OFF' }) + detail = EvaluationDetail.new('b', 1, EvaluationReason::off) result = basic_evaluator.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(nil) @@ -30,7 +30,7 @@ module Impl variations: ['a', 'b', 'c'] } user = { key: 'x' } - detail = EvaluationDetail.new(nil, nil, { kind: 'OFF' }) + detail = EvaluationDetail.new(nil, nil, EvaluationReason::off) result = basic_evaluator.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(nil) @@ -46,7 +46,7 @@ module Impl } user = { key: 'x' } detail = EvaluationDetail.new(nil, nil, - { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) result = basic_evaluator.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(nil) @@ -62,7 +62,7 @@ module Impl } user = { key: 'x' } detail = EvaluationDetail.new(nil, nil, - { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) result = basic_evaluator.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(nil) @@ -78,8 +78,7 @@ module Impl variations: ['a', 'b', 'c'] } user = { key: 'x' } - detail = EvaluationDetail.new('b', 1, - { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'badfeature' }) + detail = EvaluationDetail.new('b', 1, EvaluationReason::prerequisite_failed('badfeature')) e = subject.new(get_things( 'badfeature' => nil ), get_nothing, logger) result = e.evaluate(flag, user, factory) expect(result.detail).to eq(detail) @@ -105,8 +104,7 @@ module Impl version: 2 } user = { key: 'x' } - detail = EvaluationDetail.new('b', 1, - { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) + detail = EvaluationDetail.new('b', 1, EvaluationReason::prerequisite_failed('feature1')) events_should_be = [{ kind: 'feature', key: 'feature1', user: user, value: nil, default: nil, variation: nil, version: 2, prereqOf: 'feature0' }] @@ -137,8 +135,7 @@ module Impl version: 2 } user = { key: 'x' } - detail = EvaluationDetail.new('b', 1, - { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) + detail = EvaluationDetail.new('b', 1, EvaluationReason::prerequisite_failed('feature1')) events_should_be = [{ kind: 'feature', key: 'feature1', user: user, variation: 1, value: 'e', default: nil, version: 2, prereqOf: 'feature0' }] @@ -167,8 +164,7 @@ module Impl version: 2 } user = { key: 'x' } - detail = EvaluationDetail.new('b', 1, - { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) + detail = EvaluationDetail.new('b', 1, EvaluationReason::prerequisite_failed('feature1')) events_should_be = [{ kind: 'feature', key: 'feature1', user: user, variation: 0, value: 'd', default: nil, version: 2, prereqOf: 'feature0' }] @@ -197,7 +193,7 @@ module Impl version: 2 } user = { key: 'x' } - detail = EvaluationDetail.new('a', 0, { kind: 'FALLTHROUGH' }) + detail = EvaluationDetail.new('a', 0, EvaluationReason::fallthrough) events_should_be = [{ kind: 'feature', key: 'feature1', user: user, variation: 1, value: 'e', default: nil, version: 2, prereqOf: 'feature0' }] @@ -217,7 +213,7 @@ module Impl variations: ['a', 'b', 'c'] } user = { key: 'userkey' } - detail = EvaluationDetail.new(nil, nil, { kind: 'ERROR', 
errorKind: 'MALFORMED_FLAG' }) + detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) result = basic_evaluator.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(nil) @@ -232,7 +228,7 @@ module Impl variations: ['a', 'b', 'c'] } user = { key: 'userkey' } - detail = EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) result = basic_evaluator.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(nil) @@ -247,7 +243,7 @@ module Impl variations: ['a', 'b', 'c'] } user = { key: 'userkey' } - detail = EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) result = basic_evaluator.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(nil) @@ -262,7 +258,7 @@ module Impl variations: ['a', 'b', 'c'] } user = { key: 'userkey' } - detail = EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_MALFORMED_FLAG)) result = basic_evaluator.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(nil) @@ -280,7 +276,7 @@ module Impl variations: ['a', 'b', 'c'] } user = { key: 'userkey' } - detail = EvaluationDetail.new('c', 2, { kind: 'TARGET_MATCH' }) + detail = EvaluationDetail.new('c', 2, EvaluationReason::target_match) result = basic_evaluator.evaluate(flag, user, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(nil) diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 4672a662..5ca9b9fd 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -157,7 +157,7 @@ def event_processor value: 'value', default: 'default', trackEvents: true, - reason: { kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'id' } + reason: LaunchDarkly::EvaluationReason::rule_match(0, 'id') )) client.variation('flag', user, 'default') end @@ -182,7 +182,7 @@ def event_processor value: 'value', default: 'default', trackEvents: true, - reason: { kind: 'FALLTHROUGH' } + reason: LaunchDarkly::EvaluationReason::fallthrough )) client.variation('flag', user, 'default') end @@ -194,20 +194,22 @@ def event_processor it "returns the default value if the client is offline" do result = offline_client.variation_detail("doesntmatter", user, "default") - expected = LaunchDarkly::EvaluationDetail.new("default", nil, { kind: 'ERROR', errorKind: 'CLIENT_NOT_READY' }) + expected = LaunchDarkly::EvaluationDetail.new("default", nil, + LaunchDarkly::EvaluationReason::error(LaunchDarkly::EvaluationReason::ERROR_CLIENT_NOT_READY)) expect(result).to eq expected end it "returns the default value for an unknown feature" do result = client.variation_detail("badkey", user, "default") - expected = LaunchDarkly::EvaluationDetail.new("default", nil, { kind: 'ERROR', errorKind: 'FLAG_NOT_FOUND'}) + expected = LaunchDarkly::EvaluationDetail.new("default", nil, + LaunchDarkly::EvaluationReason::error(LaunchDarkly::EvaluationReason::ERROR_FLAG_NOT_FOUND)) expect(result).to eq expected end it "queues a feature request event for an unknown feature" do expect(event_processor).to receive(:add_event).with(hash_including( kind: "feature", key: "badkey", 
user: user, value: "default", default: "default", - reason: { kind: 'ERROR', errorKind: 'FLAG_NOT_FOUND' } + reason: LaunchDarkly::EvaluationReason::error(LaunchDarkly::EvaluationReason::ERROR_FLAG_NOT_FOUND) )) client.variation_detail("badkey", user, "default") end @@ -216,7 +218,7 @@ def event_processor config.feature_store.init({ LaunchDarkly::FEATURES => {} }) config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) result = client.variation_detail("key", user, "default") - expected = LaunchDarkly::EvaluationDetail.new("value", 0, { kind: 'OFF' }) + expected = LaunchDarkly::EvaluationDetail.new("value", 0, LaunchDarkly::EvaluationReason::off) expect(result).to eq expected end @@ -225,7 +227,7 @@ def event_processor config.feature_store.init({ LaunchDarkly::FEATURES => {} }) config.feature_store.upsert(LaunchDarkly::FEATURES, empty_feature) result = client.variation_detail("key", user, "default") - expected = LaunchDarkly::EvaluationDetail.new("default", nil, { kind: 'OFF' }) + expected = LaunchDarkly::EvaluationDetail.new("default", nil, LaunchDarkly::EvaluationReason::off) expect(result).to eq expected expect(result.default_value?).to be true end @@ -243,7 +245,7 @@ def event_processor default: "default", trackEvents: true, debugEventsUntilDate: 1000, - reason: { kind: "OFF" } + reason: LaunchDarkly::EvaluationReason::off )) client.variation_detail("key", user, "default") end From 4bf9abc57189948066a41ed10144364172010b15 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 2 Jan 2020 16:46:16 -0800 Subject: [PATCH 142/182] FrozenError doesn't exist in older Ruby, use more general RuntimeError --- spec/evaluation_detail_spec.rb | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/spec/evaluation_detail_spec.rb b/spec/evaluation_detail_spec.rb index 6c3c713e..3d7418ed 100644 --- a/spec/evaluation_detail_spec.rb +++ b/spec/evaluation_detail_spec.rb @@ -112,9 +112,9 @@ module LaunchDarkly it "freezes string properties" do rm = EvaluationReason::rule_match(1, "x") - expect { rm.rule_id.upcase! }.to raise_error(FrozenError) + expect { rm.rule_id.upcase! }.to raise_error(RuntimeError) pf = EvaluationReason::prerequisite_failed("x") - expect { pf.prerequisite_key.upcase! }.to raise_error(FrozenError) + expect { pf.prerequisite_key.upcase! 
}.to raise_error(RuntimeError) end it "checks parameter types" do From d8f5263e4d72e265eb5eec16a9cf93a97f83ab56 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 2 Jan 2020 20:19:56 -0800 Subject: [PATCH 143/182] precompute evaluation reasons when we receive a flag --- lib/ldclient-rb/impl/evaluator.rb | 7 ++- .../impl/integrations/consul_impl.rb | 10 +-- .../impl/integrations/dynamodb_impl.rb | 10 +-- .../impl/integrations/redis_impl.rb | 10 ++- lib/ldclient-rb/impl/model/flag.rb | 26 -------- lib/ldclient-rb/impl/model/serialization.rb | 62 +++++++++++++++++++ lib/ldclient-rb/polling.rb | 5 +- lib/ldclient-rb/requestor.rb | 15 +++-- lib/ldclient-rb/stream.rb | 17 +++-- spec/impl/evaluator_rule_spec.rb | 12 ++++ spec/impl/evaluator_spec.rb | 20 +++++- spec/impl/model/serialization_spec.rb | 41 ++++++++++++ spec/polling_spec.rb | 4 +- spec/requestor_spec.rb | 22 +++---- 14 files changed, 186 insertions(+), 75 deletions(-) delete mode 100644 lib/ldclient-rb/impl/model/flag.rb create mode 100644 lib/ldclient-rb/impl/model/serialization.rb create mode 100644 spec/impl/model/serialization_spec.rb diff --git a/lib/ldclient-rb/impl/evaluator.rb b/lib/ldclient-rb/impl/evaluator.rb index 0bc78552..aa2b9e12 100644 --- a/lib/ldclient-rb/impl/evaluator.rb +++ b/lib/ldclient-rb/impl/evaluator.rb @@ -84,7 +84,9 @@ def eval_internal(flag, user, events, event_factory) rules.each_index do |i| rule = rules[i] if rule_match_user(rule, user) - return get_value_for_variation_or_rollout(flag, rule, user, EvaluationReason::rule_match(i, rule[:id])) + reason = rule[:_reason] # try to use cached reason for this rule + reason = EvaluationReason::rule_match(i, rule[:id]) if reason.nil? + return get_value_for_variation_or_rollout(flag, rule, user, reason) end end @@ -121,7 +123,8 @@ def check_prerequisites(flag, user, events, event_factory) end end if !prereq_ok - return EvaluationReason::prerequisite_failed(prereq_key) + reason = prerequisite[:_reason] # try to use cached reason + return reason.nil? ? EvaluationReason::prerequisite_failed(prereq_key) : reason end end nil diff --git a/lib/ldclient-rb/impl/integrations/consul_impl.rb b/lib/ldclient-rb/impl/integrations/consul_impl.rb index 10c16dbc..2f186dab 100644 --- a/lib/ldclient-rb/impl/integrations/consul_impl.rb +++ b/lib/ldclient-rb/impl/integrations/consul_impl.rb @@ -39,7 +39,7 @@ def init_internal(all_data) # Insert or update every provided item all_data.each do |kind, items| items.values.each do |item| - value = item.to_json + value = Model.serialize(kind, item) key = item_key(kind, item[:key]) ops.push({ 'KV' => { 'Verb' => 'set', 'Key' => key, 'Value' => value } }) unused_old_keys.delete(key) @@ -62,7 +62,7 @@ def init_internal(all_data) def get_internal(kind, key) value = Diplomat::Kv.get(item_key(kind, key), {}, :return) # :return means "don't throw an error if not found" - (value.nil? || value == "") ? nil : JSON.parse(value, symbolize_names: true) + (value.nil? || value == "") ? nil : Model.deserialize(kind, value) end def get_all_internal(kind) @@ -71,7 +71,7 @@ def get_all_internal(kind) (results == "" ? [] : results).each do |result| value = result[:value] if !value.nil? 
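As a rough sketch (illustrative only; `flag_hash` is a placeholder), the `Model.serialize` / `Model.deserialize` calls being introduced into these store implementations are a thin round trip over JSON, with the extra step that deserialization precomputes the cached reasons shown earlier:

json = LaunchDarkly::Impl::Model.serialize(LaunchDarkly::FEATURES, flag_hash)   # plain JSON string
flag = LaunchDarkly::Impl::Model.deserialize(LaunchDarkly::FEATURES, json)
# flag is the parsed hash; for feature flags, each rule and prerequisite now also
# carries a precomputed :_reason entry that evaluation can reuse.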
- item = JSON.parse(value, symbolize_names: true) + item = Model.deserialize(kind, value) items_out[item[:key].to_sym] = item end end @@ -80,7 +80,7 @@ def get_all_internal(kind) def upsert_internal(kind, new_item) key = item_key(kind, new_item[:key]) - json = new_item.to_json + json = Model.serialize(kind, new_item) # We will potentially keep retrying indefinitely until someone's write succeeds while true @@ -88,7 +88,7 @@ def upsert_internal(kind, new_item) if old_value.nil? || old_value == "" mod_index = 0 else - old_item = JSON.parse(old_value[0]["Value"], symbolize_names: true) + old_item = Model.deserialize(kind, old_value[0]["Value"]) # Check whether the item is stale. If so, don't do the update (and return the existing item to # FeatureStoreWrapper so it can be cached) if old_item[:version] >= new_item[:version] diff --git a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb index a76fae52..464eb5e4 100644 --- a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +++ b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb @@ -77,7 +77,7 @@ def init_internal(all_data) def get_internal(kind, key) resp = get_item_by_keys(namespace_for_kind(kind), key) - unmarshal_item(resp.item) + unmarshal_item(kind, resp.item) end def get_all_internal(kind) @@ -86,7 +86,7 @@ def get_all_internal(kind) while true resp = @client.query(req) resp.items.each do |item| - item_out = unmarshal_item(item) + item_out = unmarshal_item(kind, item) items_out[item_out[:key].to_sym] = item_out end break if resp.last_evaluated_key.nil? || resp.last_evaluated_key.length == 0 @@ -196,15 +196,15 @@ def read_existing_keys(kinds) def marshal_item(kind, item) make_keys_hash(namespace_for_kind(kind), item[:key]).merge({ VERSION_ATTRIBUTE => item[:version], - ITEM_JSON_ATTRIBUTE => item.to_json + ITEM_JSON_ATTRIBUTE => Model.serialize(kind, item) }) end - def unmarshal_item(item) + def unmarshal_item(kind, item) return nil if item.nil? || item.length == 0 json_attr = item[ITEM_JSON_ATTRIBUTE] raise RuntimeError.new("DynamoDB map did not contain expected item string") if json_attr.nil? - JSON.parse(json_attr, symbolize_names: true) + Model.deserialize(kind, json_attr) end end diff --git a/lib/ldclient-rb/impl/integrations/redis_impl.rb b/lib/ldclient-rb/impl/integrations/redis_impl.rb index 107340f8..ac16a976 100644 --- a/lib/ldclient-rb/impl/integrations/redis_impl.rb +++ b/lib/ldclient-rb/impl/integrations/redis_impl.rb @@ -53,7 +53,7 @@ def init_internal(all_data) multi.del(items_key(kind)) count = count + items.count items.each do |key, item| - multi.hset(items_key(kind), key, item.to_json) + multi.hset(items_key(kind), key, Model.serialize(kind,item)) end end multi.set(inited_key, inited_key) @@ -73,8 +73,7 @@ def get_all_internal(kind) with_connection do |redis| hashfs = redis.hgetall(items_key(kind)) hashfs.each do |k, json_item| - f = JSON.parse(json_item, symbolize_names: true) - fs[k.to_sym] = f + fs[k.to_sym] = Model.deserialize(kind, json_item) end end fs @@ -93,7 +92,7 @@ def upsert_internal(kind, new_item) before_update_transaction(base_key, key) if old_item.nil? || old_item[:version] < new_item[:version] result = redis.multi do |multi| - multi.hset(base_key, key, new_item.to_json) + multi.hset(base_key, key, Model.serialize(kind, new_item)) end if result.nil? 
@logger.debug { "RedisFeatureStore: concurrent modification detected, retrying" } @@ -145,8 +144,7 @@ def with_connection end def get_redis(redis, kind, key) - json_item = redis.hget(items_key(kind), key) - json_item.nil? ? nil : JSON.parse(json_item, symbolize_names: true) + Model.deserialize(kind, redis.hget(items_key(kind), key)) end end end diff --git a/lib/ldclient-rb/impl/model/flag.rb b/lib/ldclient-rb/impl/model/flag.rb deleted file mode 100644 index a4de3d17..00000000 --- a/lib/ldclient-rb/impl/model/flag.rb +++ /dev/null @@ -1,26 +0,0 @@ - -module LaunchDarkly - module Impl - module Model - # Called after we have deserialized a flag from JSON (because we received it from LaunchDarkly, or - # read it from a persistent data store). Generates immutable instances of every parameterized - # evaluation reason that could be generated by this flag, so we can avoid creating new reason - # instances during evaluations. - def preprocess_flag_after_deserializing(flag) - prereqs = flag[:prerequisites] - if !prereqs.nil? - prereqs.each do |prereq| - prereq[:_reason] = EvaluationReason::prerequisite_failed(prereq[:key]) - end - end - rules = flag[:rules] - if !rules.nil? - rules.each_index do |i| - rule = rules[i] - rule[:_reason] = EvaluationReason::rule_match(i, rule[:id]) - end - end - end - end - end -end diff --git a/lib/ldclient-rb/impl/model/serialization.rb b/lib/ldclient-rb/impl/model/serialization.rb new file mode 100644 index 00000000..fcf8b135 --- /dev/null +++ b/lib/ldclient-rb/impl/model/serialization.rb @@ -0,0 +1,62 @@ + +module LaunchDarkly + module Impl + module Model + # Abstraction of deserializing a feature flag or segment that was read from a data store or + # received from LaunchDarkly. + def self.deserialize(kind, json) + return nil if json.nil? + item = JSON.parse(json, symbolize_names: true) + postprocess_item_after_deserializing!(kind, item) + item + end + + # Abstraction of serializing a feature flag or segment that will be written to a data store. + # Currently we just call to_json. + def self.serialize(kind, item) + item.to_json + end + + # Translates a { flags: ..., segments: ... } object received from LaunchDarkly to the data store format. + def self.make_all_store_data(received_data) + flags = received_data[:flags] + postprocess_items_after_deserializing!(FEATURES, flags) + segments = received_data[:segments] + postprocess_items_after_deserializing!(SEGMENTS, segments) + { FEATURES => flags, SEGMENTS => segments } + end + + # Called after we have deserialized a model item from JSON (because we received it from LaunchDarkly, + # or read it from a persistent data store). This allows us to precompute some derived attributes that + # will never change during the lifetime of that item. + def self.postprocess_item_after_deserializing!(kind, item) + return if !item + # Currently we are special-casing this for FEATURES; eventually it will be handled by delegating + # to the "kind" object or the item class. + if kind.eql? FEATURES + # For feature flags, we precompute all possible parameterized EvaluationReason instances. + prereqs = item[:prerequisites] + if !prereqs.nil? + prereqs.each do |prereq| + prereq[:_reason] = EvaluationReason::prerequisite_failed(prereq[:key]) + end + end + rules = item[:rules] + if !rules.nil? 
+ rules.each_index do |i| + rule = rules[i] + rule[:_reason] = EvaluationReason::rule_match(i, rule[:id]) + end + end + end + end + + def self.postprocess_items_after_deserializing!(kind, items_map) + return items_map if !items_map + items_map.each do |key, item| + postprocess_item_after_deserializing!(kind, item) + end + end + end + end +end diff --git a/lib/ldclient-rb/polling.rb b/lib/ldclient-rb/polling.rb index da0427dc..a9312413 100644 --- a/lib/ldclient-rb/polling.rb +++ b/lib/ldclient-rb/polling.rb @@ -37,10 +37,7 @@ def stop def poll all_data = @requestor.request_all_data if all_data - @config.feature_store.init({ - FEATURES => all_data[:flags], - SEGMENTS => all_data[:segments] - }) + @config.feature_store.init(all_data) if @initialized.make_true @config.logger.info { "[LDClient] Polling connection initialized" } @ready.set diff --git a/lib/ldclient-rb/requestor.rb b/lib/ldclient-rb/requestor.rb index f7174787..3f085c7c 100644 --- a/lib/ldclient-rb/requestor.rb +++ b/lib/ldclient-rb/requestor.rb @@ -1,3 +1,5 @@ +require "ldclient-rb/impl/model/serialization" + require "concurrent/atomics" require "json" require "uri" @@ -27,15 +29,16 @@ def initialize(sdk_key, config) end def request_flag(key) - make_request("/sdk/latest-flags/" + key) + request_single_item(FEATURES, "/sdk/latest-flags/" + key) end def request_segment(key) - make_request("/sdk/latest-segments/" + key) + request_single_item(SEGMENTS, "/sdk/latest-segments/" + key) end def request_all_data() - make_request("/sdk/latest-all") + all_data = JSON.parse(make_request("/sdk/latest-all"), symbolize_names: true) + Impl::Model.make_all_store_data(all_data) end def stop @@ -47,6 +50,10 @@ def stop private + def request_single_item(kind, path) + Impl::Model.deserialize(kind, make_request(path)) + end + def make_request(path) @client.start if !@client.started? uri = URI(@config.base_uri + path) @@ -73,7 +80,7 @@ def make_request(path) etag = res["etag"] @cache.write(uri, CacheEntry.new(etag, body)) if !etag.nil? 
end - JSON.parse(body, symbolize_names: true) + body end def fix_encoding(body, content_type) diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index ddb7f669..9add0593 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -1,3 +1,5 @@ +require "ldclient-rb/impl/model/serialization" + require "concurrent/atomics" require "json" require "ld-eventsource" @@ -86,10 +88,8 @@ def process_message(message) @config.logger.debug { "[LDClient] Stream received #{method} message: #{message.data}" } if method == PUT message = JSON.parse(message.data, symbolize_names: true) - @feature_store.init({ - FEATURES => message[:data][:flags], - SEGMENTS => message[:data][:segments] - }) + all_data = Impl::Model.make_all_store_data(message[:data]) + @feature_store.init(all_data) @initialized.make_true @config.logger.info { "[LDClient] Stream initialized" } @ready.set @@ -98,7 +98,9 @@ def process_message(message) for kind in [FEATURES, SEGMENTS] key = key_for_path(kind, data[:path]) if key - @feature_store.upsert(kind, data[:data]) + data = data[:data] + Impl::Model.postprocess_item_after_deserializing!(kind, data) + @feature_store.upsert(kind, data) break end end @@ -113,10 +115,7 @@ def process_message(message) end elsif method == INDIRECT_PUT all_data = @requestor.request_all_data - @feature_store.init({ - FEATURES => all_data[:flags], - SEGMENTS => all_data[:segments] - }) + @feature_store.init(all_data) @initialized.make_true @config.logger.info { "[LDClient] Stream initialized (via indirect message)" } elsif method == INDIRECT_PATCH diff --git a/spec/impl/evaluator_rule_spec.rb b/spec/impl/evaluator_rule_spec.rb index bcbbcbdd..a1ae5d66 100644 --- a/spec/impl/evaluator_rule_spec.rb +++ b/spec/impl/evaluator_rule_spec.rb @@ -16,6 +16,18 @@ module Impl expect(result.events).to eq(nil) end + it "reuses rule match reason instances if possible" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 1 } + flag = boolean_flag_with_rules([rule]) + Model.postprocess_item_after_deserializing!(FEATURES, flag) # now there's a cached rule match reason + user = { key: 'userkey' } + detail = EvaluationDetail.new(true, 1, EvaluationReason::rule_match(0, 'ruleid')) + result1 = basic_evaluator.evaluate(flag, user, factory) + result2 = basic_evaluator.evaluate(flag, user, factory) + expect(result1.detail.reason.rule_id).to eq 'ruleid' + expect(result1.detail.reason).to be result2.detail.reason + end + it "returns an error if rule variation is too high" do rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 999 } flag = boolean_flag_with_rules([rule]) diff --git a/spec/impl/evaluator_spec.rb b/spec/impl/evaluator_spec.rb index 6fca0315..dcf8928b 100644 --- a/spec/impl/evaluator_spec.rb +++ b/spec/impl/evaluator_spec.rb @@ -3,7 +3,7 @@ module LaunchDarkly module Impl - describe "Evaluator", :evaluator_spec_base => true do + describe "Evaluator (general)", :evaluator_spec_base => true do subject { Evaluator } describe "evaluate" do @@ -85,6 +85,24 @@ module Impl expect(result.events).to eq(nil) end + it "reuses prerequisite-failed reason instances if possible" do + flag = { + key: 'feature0', + on: true, + prerequisites: [{key: 'badfeature', variation: 1}], + fallthrough: { variation: 0 }, + offVariation: 1, + variations: ['a', 'b', 'c'] + } + Model.postprocess_item_after_deserializing!(FEATURES, flag) # now there's a cached reason + user = { key: 'x' } + e = subject.new(get_things( 'badfeature' => 
nil ), get_nothing, logger) + result1 = e.evaluate(flag, user, factory) + expect(result1.detail.reason).to eq EvaluationReason::prerequisite_failed('badfeature') + result2 = e.evaluate(flag, user, factory) + expect(result2.detail.reason).to be result1.detail.reason + end + it "returns off variation and event if prerequisite of a prerequisite is not found" do flag = { key: 'feature0', diff --git a/spec/impl/model/serialization_spec.rb b/spec/impl/model/serialization_spec.rb new file mode 100644 index 00000000..0a26bcd5 --- /dev/null +++ b/spec/impl/model/serialization_spec.rb @@ -0,0 +1,41 @@ +require "spec_helper" + +module LaunchDarkly + module Impl + module Model + describe "model serialization" do + it "serializes flag" do + flag = { key: "flagkey", version: 1 } + json = Model.serialize(FEATURES, flag) + expect(JSON.parse(json, symbolize_names: true)).to eq flag + end + + it "serializes segment" do + segment = { key: "segkey", version: 1 } + json = Model.serialize(SEGMENTS, segment) + expect(JSON.parse(json, symbolize_names: true)).to eq segment + end + + it "serializes arbitrary data kind" do + thing = { key: "thingkey", name: "me" } + json = Model.serialize({ name: "things" }, thing) + expect(JSON.parse(json, symbolize_names: true)).to eq thing + end + + it "deserializes flag with no rules or prerequisites" do + flag_in = { key: "flagkey", version: 1 } + json = Model.serialize(FEATURES, flag_in) + flag_out = Model.deserialize(FEATURES, json) + expect(flag_out).to eq flag_in + end + + it "deserializes segment" do + segment_in = { key: "segkey", version: 1 } + json = Model.serialize(SEGMENTS, segment_in) + segment_out = Model.deserialize(SEGMENTS, json) + expect(segment_out).to eq segment_in + end + end + end + end +end diff --git a/spec/polling_spec.rb b/spec/polling_spec.rb index 690147d0..d4a1d9bc 100644 --- a/spec/polling_spec.rb +++ b/spec/polling_spec.rb @@ -19,10 +19,10 @@ def with_processor(store) flag = { key: 'flagkey', version: 1 } segment = { key: 'segkey', version: 1 } all_data = { - flags: { + LaunchDarkly::FEATURES => { flagkey: flag }, - segments: { + LaunchDarkly::SEGMENTS => { segkey: segment } } diff --git a/spec/requestor_spec.rb b/spec/requestor_spec.rb index 502f6d86..aaed0a92 100644 --- a/spec/requestor_spec.rb +++ b/spec/requestor_spec.rb @@ -32,7 +32,7 @@ def with_requestor(base_uri) with_requestor(server.base_uri.to_s) do |requestor| server.setup_ok_response("/", expected_data.to_json) data = requestor.request_all_data() - expect(data).to eq expected_data + expect(data).to eq LaunchDarkly::Impl::Model.make_all_store_data(expected_data) end end end @@ -75,7 +75,7 @@ def with_requestor(base_uri) data = requestor.request_all_data() expect(server.requests.count).to eq 2 expect(server.requests[1].header).to include({ "if-none-match" => [ etag ] }) - expect(data).to eq expected_data + expect(data).to eq LaunchDarkly::Impl::Model.make_all_store_data(expected_data) end end end @@ -93,14 +93,14 @@ def with_requestor(base_uri) res["ETag"] = etag1 end data = requestor.request_all_data() - expect(data).to eq expected_data1 + expect(data).to eq LaunchDarkly::Impl::Model.make_all_store_data(expected_data1) expect(server.requests.count).to eq 1 server.setup_response("/") do |req, res| res.status = 304 end data = requestor.request_all_data() - expect(data).to eq expected_data1 + expect(data).to eq LaunchDarkly::Impl::Model.make_all_store_data(expected_data1) expect(server.requests.count).to eq 2 expect(server.requests[1].header).to include({ "if-none-match" => [ etag1 ] }) @@ 
-110,7 +110,7 @@ def with_requestor(base_uri) res["ETag"] = etag2 end data = requestor.request_all_data() - expect(data).to eq expected_data2 + expect(data).to eq LaunchDarkly::Impl::Model.make_all_store_data(expected_data2) expect(server.requests.count).to eq 3 expect(server.requests[2].header).to include({ "if-none-match" => [ etag1 ] }) @@ -118,7 +118,7 @@ def with_requestor(base_uri) res.status = 304 end data = requestor.request_all_data() - expect(data).to eq expected_data2 + expect(data).to eq LaunchDarkly::Impl::Model.make_all_store_data(expected_data2) expect(server.requests.count).to eq 4 expect(server.requests[3].header).to include({ "if-none-match" => [ etag2 ] }) end @@ -131,7 +131,7 @@ def with_requestor(base_uri) server.setup_ok_response("/sdk/latest-all", content, "application/json") with_requestor(server.base_uri.to_s) do |requestor| data = requestor.request_all_data - expect(data).to eq(JSON.parse(content, symbolize_names: true)) + expect(data).to eq(LaunchDarkly::Impl::Model.make_all_store_data(JSON.parse(content, symbolize_names: true))) end end end @@ -143,7 +143,7 @@ def with_requestor(base_uri) "text/plain; charset=ISO-8859-2") with_requestor(server.base_uri.to_s) do |requestor| data = requestor.request_all_data - expect(data).to eq(JSON.parse(content, symbolize_names: true)) + expect(data).to eq(LaunchDarkly::Impl::Model.make_all_store_data(JSON.parse(content, symbolize_names: true))) end end end @@ -160,15 +160,15 @@ def with_requestor(base_uri) end it "can use a proxy server" do - content = '{"flags": {"flagkey": {"key": "flagkey"}}}' + expected_data = { flags: { flagkey: { key: "flagkey" } } } with_server do |server| - server.setup_ok_response("/sdk/latest-all", content, "application/json", { "etag" => "x" }) + server.setup_ok_response("/sdk/latest-all", expected_data.to_json, "application/json", { "etag" => "x" }) with_server(StubProxyServer.new) do |proxy| begin ENV["http_proxy"] = proxy.base_uri.to_s with_requestor(server.base_uri.to_s) do |requestor| data = requestor.request_all_data - expect(data).to eq(JSON.parse(content, symbolize_names: true)) + expect(data).to eq(LaunchDarkly::Impl::Model.make_all_store_data(expected_data)) end ensure ENV["http_proxy"] = nil From 4ffd4fcf402bf4a1acb9fe3a45e0d2c71b00ce7e Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 2 Jan 2020 20:29:22 -0800 Subject: [PATCH 144/182] rm unused --- lib/ldclient-rb/impl/model/flag.rb | 26 -------------------------- 1 file changed, 26 deletions(-) delete mode 100644 lib/ldclient-rb/impl/model/flag.rb diff --git a/lib/ldclient-rb/impl/model/flag.rb b/lib/ldclient-rb/impl/model/flag.rb deleted file mode 100644 index a4de3d17..00000000 --- a/lib/ldclient-rb/impl/model/flag.rb +++ /dev/null @@ -1,26 +0,0 @@ - -module LaunchDarkly - module Impl - module Model - # Called after we have deserialized a flag from JSON (because we received it from LaunchDarkly, or - # read it from a persistent data store). Generates immutable instances of every parameterized - # evaluation reason that could be generated by this flag, so we can avoid creating new reason - # instances during evaluations. - def preprocess_flag_after_deserializing(flag) - prereqs = flag[:prerequisites] - if !prereqs.nil? - prereqs.each do |prereq| - prereq[:_reason] = EvaluationReason::prerequisite_failed(prereq[:key]) - end - end - rules = flag[:rules] - if !rules.nil? 
- rules.each_index do |i| - rule = rules[i] - rule[:_reason] = EvaluationReason::rule_match(i, rule[:id]) - end - end - end - end - end -end From 365dddc56ba542e17da1d24d53abbd25305de8d6 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 3 Jan 2020 11:48:27 -0800 Subject: [PATCH 145/182] rename FeatureStore to DataStore --- lib/ldclient-rb/config.rb | 24 +++---- lib/ldclient-rb/file_data_source.rb | 8 +-- .../impl/integrations/consul_impl.rb | 10 +-- .../impl/integrations/dynamodb_impl.rb | 8 +-- .../impl/integrations/redis_impl.rb | 14 ++-- lib/ldclient-rb/impl/store_client_wrapper.rb | 8 +-- lib/ldclient-rb/impl/store_data_set_sorter.rb | 6 +- lib/ldclient-rb/in_memory_store.rb | 10 +-- lib/ldclient-rb/integrations/consul.rb | 12 ++-- lib/ldclient-rb/integrations/dynamodb.rb | 20 +++--- lib/ldclient-rb/integrations/redis.rb | 22 +++--- .../integrations/util/store_wrapper.rb | 24 +++---- lib/ldclient-rb/interfaces.rb | 16 ++--- lib/ldclient-rb/ldclient.rb | 14 ++-- lib/ldclient-rb/polling.rb | 2 +- lib/ldclient-rb/redis_store.rb | 16 ++--- lib/ldclient-rb/stream.rb | 14 ++-- ...e_spec_base.rb => data_store_spec_base.rb} | 4 +- spec/file_data_source_spec.rb | 2 +- spec/in_memory_data_store_spec.rb | 12 ++++ spec/in_memory_feature_store_spec.rb | 12 ---- ...tore_spec.rb => consul_data_store_spec.rb} | 12 ++-- ...re_spec.rb => dynamodb_data_store_spec.rb} | 12 ++-- spec/ldclient_spec.rb | 72 +++++++++---------- spec/polling_spec.rb | 14 ++-- spec/redis_feature_store_spec.rb | 14 ++-- spec/stream_spec.rb | 16 ++--- 27 files changed, 199 insertions(+), 199 deletions(-) rename spec/{feature_store_spec_base.rb => data_store_spec_base.rb} (97%) create mode 100644 spec/in_memory_data_store_spec.rb delete mode 100644 spec/in_memory_feature_store_spec.rb rename spec/integrations/{consul_feature_store_spec.rb => consul_data_store_spec.rb} (65%) rename spec/integrations/{dynamodb_feature_store_spec.rb => dynamodb_data_store_spec.rb} (85%) diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index c7c42e56..935abdad 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -23,7 +23,7 @@ class Config # @option opts [Float] :read_timeout (10) See {#read_timeout}. # @option opts [Float] :connect_timeout (2) See {#connect_timeout}. # @option opts [Object] :cache_store See {#cache_store}. - # @option opts [Object] :feature_store See {#feature_store}. + # @option opts [Object] :data_store See {#data_store}. # @option opts [Boolean] :use_ldd (false) See {#use_ldd?}. # @option opts [Boolean] :offline (false) See {#offline?}. # @option opts [Float] :poll_interval (30) See {#poll_interval}. @@ -48,7 +48,7 @@ def initialize(opts = {}) @flush_interval = opts[:flush_interval] || Config.default_flush_interval @connect_timeout = opts[:connect_timeout] || Config.default_connect_timeout @read_timeout = opts[:read_timeout] || Config.default_read_timeout - @feature_store = opts[:feature_store] || Config.default_feature_store + @data_store = opts[:data_store] || Config.default_data_store @stream = opts.has_key?(:stream) ? opts[:stream] : Config.default_stream @use_ldd = opts.has_key?(:use_ldd) ? opts[:use_ldd] : Config.default_use_ldd @offline = opts.has_key?(:offline) ? opts[:offline] : Config.default_offline @@ -98,9 +98,9 @@ def stream? # # Whether to use the LaunchDarkly relay proxy in daemon mode. 
In this mode, the client does not # use polling or streaming to get feature flag updates from the server, but instead reads them - # from the {#feature_store feature store}, which is assumed to be a database that is populated by + # from the {#data_store data store}, which is assumed to be a database that is populated by # a LaunchDarkly relay proxy. For more information, see ["The relay proxy"](https://docs.launchdarkly.com/v2.0/docs/the-relay-proxy) - # and ["Using a persistent feature store"](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). + # and ["Using a persistent data store"](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). # # All other properties related to streaming or polling are ignored if this option is set to true. # @@ -176,13 +176,13 @@ def offline? # # A store for feature flags and related data. The client uses it to store all data received # from LaunchDarkly, and uses the last stored data when evaluating flags. Defaults to - # {InMemoryFeatureStore}; for other implementations, see {LaunchDarkly::Integrations}. + # {InMemoryDataStore}; for other implementations, see {LaunchDarkly::Integrations}. # - # For more information, see ["Using a persistent feature store"](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). + # For more information, see ["Using a persistent data store"](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). # - # @return [LaunchDarkly::Interfaces::FeatureStore] + # @return [LaunchDarkly::Interfaces::DataStore] # - attr_reader :feature_store + attr_reader :data_store # # True if all user attributes (other than the key) should be considered private. This means @@ -361,11 +361,11 @@ def self.default_use_ldd end # - # The default value for {#feature_store}. - # @return [LaunchDarkly::Interfaces::FeatureStore] an {InMemoryFeatureStore} + # The default value for {#data_store}. + # @return [LaunchDarkly::Interfaces::DataStore] an {InMemoryDataStore} # - def self.default_feature_store - InMemoryFeatureStore.new + def self.default_data_store + InMemoryDataStore.new end # diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index cfea75f7..6cc0dc39 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -118,14 +118,14 @@ class FileDataSource # @return an object that can be stored in {Config#data_source} # def self.factory(options={}) - return lambda { |sdk_key, config| FileDataSourceImpl.new(config.feature_store, config.logger, options) } + return lambda { |sdk_key, config| FileDataSourceImpl.new(config.data_store, config.logger, options) } end end # @private class FileDataSourceImpl - def initialize(feature_store, logger, options={}) - @feature_store = feature_store + def initialize(data_store, logger, options={}) + @data_store = data_store @logger = logger @paths = options[:paths] || [] if @paths.is_a? String @@ -187,7 +187,7 @@ def load_all return end end - @feature_store.init(all_data) + @data_store.init(all_data) @initialized.make_true end diff --git a/lib/ldclient-rb/impl/integrations/consul_impl.rb b/lib/ldclient-rb/impl/integrations/consul_impl.rb index 2f186dab..34aea72c 100644 --- a/lib/ldclient-rb/impl/integrations/consul_impl.rb +++ b/lib/ldclient-rb/impl/integrations/consul_impl.rb @@ -5,9 +5,9 @@ module Impl module Integrations module Consul # - # Internal implementation of the Consul feature store, intended to be used with CachingStoreWrapper. 
+ # Internal implementation of the Consul data store, intended to be used with CachingStoreWrapper. # - class ConsulFeatureStoreCore + class ConsulDataStoreCore begin require "diplomat" CONSUL_ENABLED = true @@ -17,14 +17,14 @@ class ConsulFeatureStoreCore def initialize(opts) if !CONSUL_ENABLED - raise RuntimeError.new("can't use Consul feature store without the 'diplomat' gem") + raise RuntimeError.new("can't use Consul data store without the 'diplomat' gem") end @prefix = (opts[:prefix] || LaunchDarkly::Integrations::Consul.default_prefix) + '/' @logger = opts[:logger] || Config.default_logger Diplomat.configuration = opts[:consul_config] if !opts[:consul_config].nil? Diplomat.configuration.url = opts[:url] if !opts[:url].nil? - @logger.info("ConsulFeatureStore: using Consul host at #{Diplomat.configuration.url}") + @logger.info("ConsulDataStore: using Consul host at #{Diplomat.configuration.url}") end def init_internal(all_data) @@ -90,7 +90,7 @@ def upsert_internal(kind, new_item) else old_item = Model.deserialize(kind, old_value[0]["Value"]) # Check whether the item is stale. If so, don't do the update (and return the existing item to - # FeatureStoreWrapper so it can be cached) + # DataStoreWrapper so it can be cached) if old_item[:version] >= new_item[:version] return old_item end diff --git a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb index 464eb5e4..fb3a6bd4 100644 --- a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +++ b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb @@ -5,9 +5,9 @@ module Impl module Integrations module DynamoDB # - # Internal implementation of the DynamoDB feature store, intended to be used with CachingStoreWrapper. + # Internal implementation of the DynamoDB data store, intended to be used with CachingStoreWrapper. # - class DynamoDBFeatureStoreCore + class DynamoDBDataStoreCore begin require "aws-sdk-dynamodb" AWS_SDK_ENABLED = true @@ -28,7 +28,7 @@ class DynamoDBFeatureStoreCore def initialize(table_name, opts) if !AWS_SDK_ENABLED - raise RuntimeError.new("can't use DynamoDB feature store without the aws-sdk or aws-sdk-dynamodb gem") + raise RuntimeError.new("can't use DynamoDB data store without the aws-sdk or aws-sdk-dynamodb gem") end @table_name = table_name @@ -41,7 +41,7 @@ def initialize(table_name, opts) @client = Aws::DynamoDB::Client.new(opts[:dynamodb_opts] || {}) end - @logger.info("DynamoDBFeatureStore: using DynamoDB table \"#{table_name}\"") + @logger.info("DynamoDBDataStore: using DynamoDB table \"#{table_name}\"") end def init_internal(all_data) diff --git a/lib/ldclient-rb/impl/integrations/redis_impl.rb b/lib/ldclient-rb/impl/integrations/redis_impl.rb index ac16a976..e58dfd07 100644 --- a/lib/ldclient-rb/impl/integrations/redis_impl.rb +++ b/lib/ldclient-rb/impl/integrations/redis_impl.rb @@ -6,9 +6,9 @@ module Impl module Integrations module Redis # - # Internal implementation of the Redis feature store, intended to be used with CachingStoreWrapper. + # Internal implementation of the Redis data store, intended to be used with CachingStoreWrapper. 
# - class RedisFeatureStoreCore + class RedisDataStoreCore begin require "redis" require "connection_pool" @@ -19,7 +19,7 @@ class RedisFeatureStoreCore def initialize(opts) if !REDIS_ENABLED - raise RuntimeError.new("can't use Redis feature store because one of these gems is missing: redis, connection_pool") + raise RuntimeError.new("can't use Redis data store because one of these gems is missing: redis, connection_pool") end @redis_opts = opts[:redis_opts] || Hash.new @@ -40,7 +40,7 @@ def initialize(opts) @stopped = Concurrent::AtomicBoolean.new(false) with_connection do |redis| - @logger.info("RedisFeatureStore: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} \ + @logger.info("RedisDataStore: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} \ and prefix: #{@prefix}") end end @@ -59,7 +59,7 @@ def init_internal(all_data) multi.set(inited_key, inited_key) end end - @logger.info { "RedisFeatureStore: initialized with #{count} items" } + @logger.info { "RedisDataStore: initialized with #{count} items" } end def get_internal(kind, key) @@ -95,13 +95,13 @@ def upsert_internal(kind, new_item) multi.hset(base_key, key, Model.serialize(kind, new_item)) end if result.nil? - @logger.debug { "RedisFeatureStore: concurrent modification detected, retrying" } + @logger.debug { "RedisDataStore: concurrent modification detected, retrying" } try_again = true end else final_item = old_item action = new_item[:deleted] ? "delete" : "update" - @logger.warn { "RedisFeatureStore: attempted to #{action} #{key} version: #{old_item[:version]} \ + @logger.warn { "RedisDataStore: attempted to #{action} #{key} version: #{old_item[:version]} \ in '#{kind[:namespace]}' with a version that is the same or older: #{new_item[:version]}" } end redis.unwatch diff --git a/lib/ldclient-rb/impl/store_client_wrapper.rb b/lib/ldclient-rb/impl/store_client_wrapper.rb index f0948251..8c3160f1 100644 --- a/lib/ldclient-rb/impl/store_client_wrapper.rb +++ b/lib/ldclient-rb/impl/store_client_wrapper.rb @@ -4,19 +4,19 @@ module LaunchDarkly module Impl # - # Provides additional behavior that the client requires before or after feature store operations. + # Provides additional behavior that the client requires before or after data store operations. # Currently this just means sorting the data set for init(). In the future we may also use this # to provide an update listener capability. # - class FeatureStoreClientWrapper - include Interfaces::FeatureStore + class DataStoreClientWrapper + include Interfaces::DataStore def initialize(store) @store = store end def init(all_data) - @store.init(FeatureStoreDataSetSorter.sort_all_collections(all_data)) + @store.init(DataStoreDataSetSorter.sort_all_collections(all_data)) end def get(kind, key) diff --git a/lib/ldclient-rb/impl/store_data_set_sorter.rb b/lib/ldclient-rb/impl/store_data_set_sorter.rb index 4454fe75..6dad1b36 100644 --- a/lib/ldclient-rb/impl/store_data_set_sorter.rb +++ b/lib/ldclient-rb/impl/store_data_set_sorter.rb @@ -2,10 +2,10 @@ module LaunchDarkly module Impl # - # Implements a dependency graph ordering for data to be stored in a feature store. We must use this - # on every data set that will be passed to the feature store's init() method. + # Implements a dependency graph ordering for data to be stored in a data store. We must use this + # on every data set that will be passed to the data store's init() method. 
# - class FeatureStoreDataSetSorter + class DataStoreDataSetSorter # # Returns a copy of the input hash that has the following guarantees: the iteration order of the outer # hash will be in ascending order by the VersionDataKind's :priority property (if any), and for each diff --git a/lib/ldclient-rb/in_memory_store.rb b/lib/ldclient-rb/in_memory_store.rb index 576d90c7..d3bee07e 100644 --- a/lib/ldclient-rb/in_memory_store.rb +++ b/lib/ldclient-rb/in_memory_store.rb @@ -2,12 +2,12 @@ module LaunchDarkly - # These constants denote the types of data that can be stored in the feature store. If + # These constants denote the types of data that can be stored in the data store. If # we add another storable data type in the future, as long as it follows the same pattern # (having "key", "version", and "deleted" properties), we only need to add a corresponding # constant here and the existing store should be able to handle it. # - # The :priority and :get_dependency_keys properties are used by FeatureStoreDataSetSorter + # The :priority and :get_dependency_keys properties are used by DataStoreDataSetSorter # to ensure data consistency during non-atomic updates. # @private @@ -24,12 +24,12 @@ module LaunchDarkly }.freeze # - # Default implementation of the LaunchDarkly client's feature store, using an in-memory + # Default implementation of the LaunchDarkly client's data store, using an in-memory # cache. This object holds feature flags and related data received from LaunchDarkly. # Database-backed implementations are available in {LaunchDarkly::Integrations}. # - class InMemoryFeatureStore - include LaunchDarkly::Interfaces::FeatureStore + class InMemoryDataStore + include LaunchDarkly::Interfaces::DataStore def initialize @items = Hash.new diff --git a/lib/ldclient-rb/integrations/consul.rb b/lib/ldclient-rb/integrations/consul.rb index 4f32d5fd..0ecf69f8 100644 --- a/lib/ldclient-rb/integrations/consul.rb +++ b/lib/ldclient-rb/integrations/consul.rb @@ -5,7 +5,7 @@ module LaunchDarkly module Integrations module Consul # - # Default value for the `prefix` option for {new_feature_store}. + # Default value for the `prefix` option for {new_data_store}. # # @return [String] the default key prefix # @@ -14,10 +14,10 @@ def self.default_prefix end # - # Creates a Consul-backed persistent feature store. + # Creates a Consul-backed persistent data store. # # To use this method, you must first install the gem `diplomat`. Then, put the object returned by - # this method into the `feature_store` property of your client configuration ({LaunchDarkly::Config}). + # this method into the `data_store` property of your client configuration ({LaunchDarkly::Config}). 
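A minimal usage sketch, mirroring the DynamoDB and Redis examples elsewhere in these docs (the prefix and expiration values and `my_sdk_key` below are illustrative placeholders, and a reachable local Consul agent is assumed):

    store = LaunchDarkly::Integrations::Consul::new_data_store(prefix: "my-app", expiration: 30)
    config = LaunchDarkly::Config.new(data_store: store)
    client = LaunchDarkly::LDClient.new(my_sdk_key, config)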
# # @param opts [Hash] the configuration options # @option opts [Hash] :consul_config an instance of `Diplomat::Configuration` to replace the default @@ -27,10 +27,10 @@ def self.default_prefix # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` # @option opts [Integer] :expiration (15) expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache - # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object + # @return [LaunchDarkly::Interfaces::DataStore] a data store object # - def self.new_feature_store(opts, &block) - core = LaunchDarkly::Impl::Integrations::Consul::ConsulFeatureStoreCore.new(opts) + def self.new_data_store(opts, &block) + core = LaunchDarkly::Impl::Integrations::Consul::ConsulDataStoreCore.new(opts) return LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts) end end diff --git a/lib/ldclient-rb/integrations/dynamodb.rb b/lib/ldclient-rb/integrations/dynamodb.rb index 189e118f..dddf38f0 100644 --- a/lib/ldclient-rb/integrations/dynamodb.rb +++ b/lib/ldclient-rb/integrations/dynamodb.rb @@ -5,17 +5,17 @@ module LaunchDarkly module Integrations module DynamoDB # - # Creates a DynamoDB-backed persistent feature store. For more details about how and why you can - # use a persistent feature store, see the + # Creates a DynamoDB-backed persistent data store. For more details about how and why you can + # use a persistent data store, see the # [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). # # To use this method, you must first install one of the AWS SDK gems: either `aws-sdk-dynamodb`, or - # the full `aws-sdk`. Then, put the object returned by this method into the `feature_store` property + # the full `aws-sdk`. Then, put the object returned by this method into the `data_store` property # of your client configuration ({LaunchDarkly::Config}). # - # @example Configuring the feature store - # store = LaunchDarkly::Integrations::DynamoDB::new_feature_store("my-table-name") - # config = LaunchDarkly::Config.new(feature_store: store) + # @example Configuring the data store + # store = LaunchDarkly::Integrations::DynamoDB::new_data_store("my-table-name") + # config = LaunchDarkly::Config.new(data_store: store) # client = LaunchDarkly::LDClient.new(my_sdk_key, config) # # Note that the specified table must already exist in DynamoDB. 
It must have a partition key called @@ -31,15 +31,15 @@ module DynamoDB # @param table_name [String] name of an existing DynamoDB table # @param opts [Hash] the configuration options # @option opts [Hash] :dynamodb_opts options to pass to the DynamoDB client constructor (ignored if you specify `:existing_client`) - # @option opts [Object] :existing_client an already-constructed DynamoDB client for the feature store to use + # @option opts [Object] :existing_client an already-constructed DynamoDB client for the data store to use # @option opts [String] :prefix namespace prefix to add to all keys used by LaunchDarkly # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` # @option opts [Integer] :expiration (15) expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache - # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object + # @return [LaunchDarkly::Interfaces::DataStore] a data store object # - def self.new_feature_store(table_name, opts) - core = LaunchDarkly::Impl::Integrations::DynamoDB::DynamoDBFeatureStoreCore.new(table_name, opts) + def self.new_data_store(table_name, opts) + core = LaunchDarkly::Impl::Integrations::DynamoDB::DynamoDBDataStoreCore.new(table_name, opts) return LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts) end end diff --git a/lib/ldclient-rb/integrations/redis.rb b/lib/ldclient-rb/integrations/redis.rb index 7e447657..f7437b22 100644 --- a/lib/ldclient-rb/integrations/redis.rb +++ b/lib/ldclient-rb/integrations/redis.rb @@ -4,7 +4,7 @@ module LaunchDarkly module Integrations module Redis # - # Default value for the `redis_url` option for {new_feature_store}. This points to an instance of + # Default value for the `redis_url` option for {new_data_store}. This points to an instance of # Redis running at `localhost` with its default port. # # @return [String] the default Redis URL @@ -14,7 +14,7 @@ def self.default_redis_url end # - # Default value for the `prefix` option for {new_feature_store}. + # Default value for the `prefix` option for {new_data_store}. # # @return [String] the default key prefix # @@ -23,17 +23,17 @@ def self.default_prefix end # - # Creates a Redis-backed persistent feature store. For more details about how and why you can - # use a persistent feature store, see the + # Creates a Redis-backed persistent data store. For more details about how and why you can + # use a persistent data store, see the # [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). # # To use this method, you must first have the `redis` and `connection-pool` gems installed. Then, - # put the object returned by this method into the `feature_store` property of your + # put the object returned by this method into the `data_store` property of your # client configuration. 
# - # @example Configuring the feature store - # store = LaunchDarkly::Integrations::Redis::new_feature_store(redis_url: "redis://my-server") - # config = LaunchDarkly::Config.new(feature_store: store) + # @example Configuring the data store + # store = LaunchDarkly::Integrations::Redis::new_data_store(redis_url: "redis://my-server") + # config = LaunchDarkly::Config.new(data_store: store) # client = LaunchDarkly::LDClient.new(my_sdk_key, config) # # @param opts [Hash] the configuration options @@ -45,10 +45,10 @@ def self.default_prefix # @option opts [Integer] :expiration (15) expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache # @option opts [Object] :pool custom connection pool, if desired - # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object + # @return [LaunchDarkly::Interfaces::DataStore] a data store object # - def self.new_feature_store(opts) - return RedisFeatureStore.new(opts) + def self.new_data_store(opts) + return RedisDataStore.new(opts) end end end diff --git a/lib/ldclient-rb/integrations/util/store_wrapper.rb b/lib/ldclient-rb/integrations/util/store_wrapper.rb index 26318d67..c9ff5bcf 100644 --- a/lib/ldclient-rb/integrations/util/store_wrapper.rb +++ b/lib/ldclient-rb/integrations/util/store_wrapper.rb @@ -6,22 +6,22 @@ module LaunchDarkly module Integrations module Util # - # CachingStoreWrapper is a partial implementation of the {LaunchDarkly::Interfaces::FeatureStore} + # CachingStoreWrapper is a partial implementation of the {LaunchDarkly::Interfaces::DataStore} # pattern that delegates part of its behavior to another object, while providing optional caching - # behavior and other logic that would otherwise be repeated in every feature store implementation. + # behavior and other logic that would otherwise be repeated in every data store implementation. # This makes it easier to create new database integrations by implementing only the database-specific # logic. # - # The mixin {FeatureStoreCore} describes the methods that need to be supported by the inner + # The mixin {DataStoreCore} describes the methods that need to be supported by the inner # implementation object. # class CachingStoreWrapper - include LaunchDarkly::Interfaces::FeatureStore + include LaunchDarkly::Interfaces::DataStore # # Creates a new store wrapper instance. # - # @param core [Object] an object that implements the {FeatureStoreCore} methods + # @param core [Object] an object that implements the {DataStoreCore} methods # @param opts [Hash] a hash that may include cache-related options; all others will be ignored # @option opts [Float] :expiration (15) cache TTL; zero means no caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache @@ -146,9 +146,9 @@ def items_if_not_deleted(items) # This module describes the methods that you must implement on your own object in order to # use {CachingStoreWrapper}. # - module FeatureStoreCore + module DataStoreCore # - # Initializes the store. This is the same as {LaunchDarkly::Interfaces::FeatureStore#init}, + # Initializes the store. This is the same as {LaunchDarkly::Interfaces::DataStore#init}, # but the wrapper will take care of updating the cache if caching is enabled. # # If possible, the store should update the entire data set atomically. If that is not possible, @@ -164,7 +164,7 @@ def init_internal(all_data) end # - # Retrieves a single entity. 
This is the same as {LaunchDarkly::Interfaces::FeatureStore#get} + # Retrieves a single entity. This is the same as {LaunchDarkly::Interfaces::DataStore#get} # except that 1. the wrapper will take care of filtering out deleted entities by checking the # `:deleted` property, so you can just return exactly what was in the data store, and 2. the # wrapper will take care of checking and updating the cache if caching is enabled. @@ -177,7 +177,7 @@ def get_internal(kind, key) end # - # Retrieves all entities of the specified kind. This is the same as {LaunchDarkly::Interfaces::FeatureStore#all} + # Retrieves all entities of the specified kind. This is the same as {LaunchDarkly::Interfaces::DataStore#all} # except that 1. the wrapper will take care of filtering out deleted entities by checking the # `:deleted` property, so you can just return exactly what was in the data store, and 2. the # wrapper will take care of checking and updating the cache if caching is enabled. @@ -190,13 +190,13 @@ def get_all_internal(kind) end # - # Attempts to add or update an entity. This is the same as {LaunchDarkly::Interfaces::FeatureStore#upsert} + # Attempts to add or update an entity. This is the same as {LaunchDarkly::Interfaces::DataStore#upsert} # except that 1. the wrapper will take care of updating the cache if caching is enabled, and 2. # the method is expected to return the final state of the entity (i.e. either the `item` # parameter if the update succeeded, or the previously existing entity in the store if the # update failed; this is used for the caching logic). # - # Note that FeatureStoreCore does not have a `delete` method. This is because {CachingStoreWrapper} + # Note that DataStoreCore does not have a `delete` method. This is because {CachingStoreWrapper} # implements `delete` by simply calling `upsert` with an item whose `:deleted` property is true. # # @param kind [Object] the kind of entity to add or update @@ -208,7 +208,7 @@ def upsert_internal(kind, item) # # Checks whether this store has been initialized. This is the same as - # {LaunchDarkly::Interfaces::FeatureStore#initialized?} except that there is less of a concern + # {LaunchDarkly::Interfaces::DataStore#initialized?} except that there is less of a concern # for efficiency, because the wrapper will use caching and memoization in order to call the method # as little as possible. # diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb index d2a9f862..36bdcd94 100644 --- a/lib/ldclient-rb/interfaces.rb +++ b/lib/ldclient-rb/interfaces.rb @@ -5,13 +5,13 @@ module LaunchDarkly # module Interfaces # - # Mixin that defines the required methods of a feature store implementation. The LaunchDarkly - # client uses the feature store to persist feature flags and related objects received from + # Mixin that defines the required methods of a data store implementation. The LaunchDarkly + # client uses the data store to persist feature flags and related objects received from # the LaunchDarkly service. Implementations must support concurrent access and updates. - # For more about how feature stores can be used, see: - # [Using a persistent feature store](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). + # For more about how data stores can be used, see: + # [Using a persistent data store](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). 
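As a rough sketch of how a custom implementation declares itself (the class name is hypothetical, and the listed methods are the ones this mixin defines):

    class MyCustomDataStore
      include LaunchDarkly::Interfaces::DataStore

      # Implement init, get, all, upsert, delete, initialized?, and stop here,
      # with the semantics described by this mixin. The store must tolerate
      # concurrent access and updates.
    end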
# - # An entity that can be stored in a feature store is a hash that can be converted to and from + # An entity that can be stored in a data store is a hash that can be converted to and from # JSON, and that has at a minimum the following properties: `:key`, a string that is unique # among entities of the same kind; `:version`, an integer that is higher for newer data; # `:deleted`, a boolean (optional, defaults to false) that if true means this is a @@ -22,12 +22,12 @@ module Interfaces # `:namespace`, which is a short string unique to that kind. This string can be used as a # collection name or a key prefix. # - # The default implementation is {LaunchDarkly::InMemoryFeatureStore}. Several implementations + # The default implementation is {LaunchDarkly::InMemoryDataStore}. Several implementations # that use databases can be found in {LaunchDarkly::Integrations}. If you want to write a new # implementation, see {LaunchDarkly::Integrations::Util} for tools that can make this task # simpler. # - module FeatureStore + module DataStore # # Initializes (or re-initializes) the store with the specified set of entities. Any # existing entries will be removed. Implementations can assume that this data set is up to @@ -116,7 +116,7 @@ def stop # # Mixin that defines the required methods of a data source implementation. This is the # component that delivers feature flag data from LaunchDarkly to the LDClient by putting - # the data in the {FeatureStore}. It is expected to run concurrently on its own thread. + # the data in the {DataStore}. It is expected to run concurrently on its own thread. # # The client has its own standard implementation, which uses either a streaming connection or # polling depending on your configuration. Normally you will not need to use another one diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 35c1bc41..eed490a9 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -37,13 +37,13 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) @event_factory_default = EventFactory.new(false) @event_factory_with_reasons = EventFactory.new(true) - # We need to wrap the feature store object with a FeatureStoreClientWrapper in order to add + # We need to wrap the data store object with a DataStoreClientWrapper in order to add # some necessary logic around updates. Unfortunately, we have code elsewhere that accesses - # the feature store through the Config object, so we need to make a new Config that uses + # the data store through the Config object, so we need to make a new Config that uses # the wrapped store. - @store = Impl::FeatureStoreClientWrapper.new(config.feature_store) + @store = Impl::DataStoreClientWrapper.new(config.data_store) updated_config = config.clone - updated_config.instance_variable_set(:@feature_store, @store) + updated_config.instance_variable_set(:@data_store, @store) @config = updated_config get_flag = lambda { |key| @store.get(FEATURES, key) } @@ -127,7 +127,7 @@ def secure_mode_hash(user) # given up permanently (for instance, if your SDK key is invalid). In the meantime, # any call to {#variation} or {#variation_detail} will behave as follows: # - # 1. It will check whether the feature store already contains data (that is, you + # 1. It will check whether the data store already contains data (that is, you # are using a database-backed store and it was populated by a previous run of this # application). If so, it will use the last known feature flag data. 
# @@ -362,9 +362,9 @@ def evaluate_internal(key, user, default, event_factory) if !initialized? if @store.initialized? - @config.logger.warn { "[LDClient] Client has not finished initializing; using last known values from feature store" } + @config.logger.warn { "[LDClient] Client has not finished initializing; using last known values from data store" } else - @config.logger.error { "[LDClient] Client has not finished initializing; feature store unavailable, returning default value" } + @config.logger.error { "[LDClient] Client has not finished initializing; data store unavailable, returning default value" } detail = Evaluator.error_result(EvaluationReason::ERROR_CLIENT_NOT_READY, default) @event_processor.add_event(event_factory.new_unknown_flag_event(key, user, default, detail.reason)) return detail diff --git a/lib/ldclient-rb/polling.rb b/lib/ldclient-rb/polling.rb index a9312413..5cbc220a 100644 --- a/lib/ldclient-rb/polling.rb +++ b/lib/ldclient-rb/polling.rb @@ -37,7 +37,7 @@ def stop def poll all_data = @requestor.request_all_data if all_data - @config.feature_store.init(all_data) + @config.data_store.init(all_data) if @initialized.make_true @config.logger.info { "[LDClient] Polling connection initialized" } @ready.set diff --git a/lib/ldclient-rb/redis_store.rb b/lib/ldclient-rb/redis_store.rb index 48632411..128336b0 100644 --- a/lib/ldclient-rb/redis_store.rb +++ b/lib/ldclient-rb/redis_store.rb @@ -3,28 +3,28 @@ module LaunchDarkly # - # An implementation of the LaunchDarkly client's feature store that uses a Redis + # An implementation of the LaunchDarkly client's data store that uses a Redis # instance. This object holds feature flags and related data received from the # streaming API. Feature data can also be further cached in memory to reduce overhead # of calls to Redis. # # To use this class, you must first have the `redis` and `connection-pool` gems - # installed. Then, create an instance and store it in the `feature_store` property + # installed. Then, create an instance and store it in the `data_store` property # of your client configuration. # # @deprecated Use the factory method in {LaunchDarkly::Integrations::Redis} instead. This specific # implementation class may be changed or removed in the future. # - class RedisFeatureStore - include LaunchDarkly::Interfaces::FeatureStore + class RedisDataStore + include LaunchDarkly::Interfaces::DataStore # Note that this class is now just a facade around CachingStoreWrapper, which is in turn delegating - # to RedisFeatureStoreCore where the actual database logic is. This class was retained for historical - # reasons, so that existing code can still call RedisFeatureStore.new. In the future, we will migrate + # to RedisDataStoreCore where the actual database logic is. This class was retained for historical + # reasons, so that existing code can still call RedisDataStore.new. In the future, we will migrate # away from exposing these concrete classes and use factory methods instead. # - # Constructor for a RedisFeatureStore instance. + # Constructor for a RedisDataStore instance. 
# # @param opts [Hash] the configuration options # @option opts [String] :redis_url URL of the Redis instance (shortcut for omitting redis_opts) @@ -37,7 +37,7 @@ class RedisFeatureStore # @option opts [Object] :pool custom connection pool, if desired # def initialize(opts = {}) - core = LaunchDarkly::Impl::Integrations::Redis::RedisFeatureStoreCore.new(opts) + core = LaunchDarkly::Impl::Integrations::Redis::RedisDataStoreCore.new(opts) @wrapper = LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts) end diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index 9add0593..b5962e00 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -29,7 +29,7 @@ class StreamProcessor def initialize(sdk_key, config, requestor) @sdk_key = sdk_key @config = config - @feature_store = config.feature_store + @data_store = config.data_store @requestor = requestor @initialized = Concurrent::AtomicBoolean.new(false) @started = Concurrent::AtomicBoolean.new(false) @@ -89,7 +89,7 @@ def process_message(message) if method == PUT message = JSON.parse(message.data, symbolize_names: true) all_data = Impl::Model.make_all_store_data(message[:data]) - @feature_store.init(all_data) + @data_store.init(all_data) @initialized.make_true @config.logger.info { "[LDClient] Stream initialized" } @ready.set @@ -100,7 +100,7 @@ def process_message(message) if key data = data[:data] Impl::Model.postprocess_item_after_deserializing!(kind, data) - @feature_store.upsert(kind, data) + @data_store.upsert(kind, data) break end end @@ -109,23 +109,23 @@ def process_message(message) for kind in [FEATURES, SEGMENTS] key = key_for_path(kind, data[:path]) if key - @feature_store.delete(kind, key, data[:version]) + @data_store.delete(kind, key, data[:version]) break end end elsif method == INDIRECT_PUT all_data = @requestor.request_all_data - @feature_store.init(all_data) + @data_store.init(all_data) @initialized.make_true @config.logger.info { "[LDClient] Stream initialized (via indirect message)" } elsif method == INDIRECT_PATCH key = key_for_path(FEATURES, message.data) if key - @feature_store.upsert(FEATURES, @requestor.request_flag(key)) + @data_store.upsert(FEATURES, @requestor.request_flag(key)) else key = key_for_path(SEGMENTS, message.data) if key - @feature_store.upsert(SEGMENTS, @requestor.request_segment(key)) + @data_store.upsert(SEGMENTS, @requestor.request_segment(key)) end end else diff --git a/spec/feature_store_spec_base.rb b/spec/data_store_spec_base.rb similarity index 97% rename from spec/feature_store_spec_base.rb rename to spec/data_store_spec_base.rb index 2d06f0ff..a937d93e 100644 --- a/spec/feature_store_spec_base.rb +++ b/spec/data_store_spec_base.rb @@ -1,9 +1,9 @@ require "spec_helper" -shared_examples "feature_store" do |create_store_method, clear_data_method| +shared_examples "data_store" do |create_store_method, clear_data_method| # Rather than testing with feature flag or segment data, we'll use this fake data kind - # to make it clear that feature stores need to be able to handle arbitrary data. + # to make it clear that data stores need to be able to handle arbitrary data. 
let(:things_kind) { { namespace: "things" } } let(:key1) { "thing1" } diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index 837b775d..c9670a11 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -96,7 +96,7 @@ def []=(key, value) before do @config = LaunchDarkly::Config.new - @store = @config.feature_store + @store = @config.data_store @tmp_dir = Dir.mktmpdir end diff --git a/spec/in_memory_data_store_spec.rb b/spec/in_memory_data_store_spec.rb new file mode 100644 index 00000000..e43a2ebb --- /dev/null +++ b/spec/in_memory_data_store_spec.rb @@ -0,0 +1,12 @@ +require "data_store_spec_base" +require "spec_helper" + +def create_in_memory_store(opts = {}) + LaunchDarkly::InMemoryDataStore.new +end + +describe LaunchDarkly::InMemoryDataStore do + subject { LaunchDarkly::InMemoryDataStore } + + include_examples "data_store", method(:create_in_memory_store) +end diff --git a/spec/in_memory_feature_store_spec.rb b/spec/in_memory_feature_store_spec.rb deleted file mode 100644 index c403fc69..00000000 --- a/spec/in_memory_feature_store_spec.rb +++ /dev/null @@ -1,12 +0,0 @@ -require "feature_store_spec_base" -require "spec_helper" - -def create_in_memory_store(opts = {}) - LaunchDarkly::InMemoryFeatureStore.new -end - -describe LaunchDarkly::InMemoryFeatureStore do - subject { LaunchDarkly::InMemoryFeatureStore } - - include_examples "feature_store", method(:create_in_memory_store) -end diff --git a/spec/integrations/consul_feature_store_spec.rb b/spec/integrations/consul_data_store_spec.rb similarity index 65% rename from spec/integrations/consul_feature_store_spec.rb rename to spec/integrations/consul_data_store_spec.rb index e74d0f0d..07680afa 100644 --- a/spec/integrations/consul_feature_store_spec.rb +++ b/spec/integrations/consul_data_store_spec.rb @@ -1,4 +1,4 @@ -require "feature_store_spec_base" +require "data_store_spec_base" require "diplomat" require "spec_helper" @@ -13,12 +13,12 @@ } def create_consul_store(opts = {}) - LaunchDarkly::Integrations::Consul::new_feature_store( + LaunchDarkly::Integrations::Consul::new_data_store( $consul_base_opts.merge(opts).merge({ expiration: 60 })) end def create_consul_store_uncached(opts = {}) - LaunchDarkly::Integrations::Consul::new_feature_store( + LaunchDarkly::Integrations::Consul::new_data_store( $consul_base_opts.merge(opts).merge({ expiration: 0 })) end @@ -27,16 +27,16 @@ def clear_all_data end -describe "Consul feature store" do +describe "Consul data store" do break if ENV['LD_SKIP_DATABASE_TESTS'] == '1' # These tests will all fail if there isn't a local Consul instance running. 
context "with local cache" do - include_examples "feature_store", method(:create_consul_store), method(:clear_all_data) + include_examples "data_store", method(:create_consul_store), method(:clear_all_data) end context "without local cache" do - include_examples "feature_store", method(:create_consul_store_uncached), method(:clear_all_data) + include_examples "data_store", method(:create_consul_store_uncached), method(:clear_all_data) end end diff --git a/spec/integrations/dynamodb_feature_store_spec.rb b/spec/integrations/dynamodb_data_store_spec.rb similarity index 85% rename from spec/integrations/dynamodb_feature_store_spec.rb rename to spec/integrations/dynamodb_data_store_spec.rb index 7734670e..6dbff05d 100644 --- a/spec/integrations/dynamodb_feature_store_spec.rb +++ b/spec/integrations/dynamodb_data_store_spec.rb @@ -1,4 +1,4 @@ -require "feature_store_spec_base" +require "data_store_spec_base" require "aws-sdk-dynamodb" require "spec_helper" @@ -22,12 +22,12 @@ } def create_dynamodb_store(opts = {}) - LaunchDarkly::Integrations::DynamoDB::new_feature_store($table_name, + LaunchDarkly::Integrations::DynamoDB::new_data_store($table_name, $ddb_base_opts.merge(opts).merge({ expiration: 60 })) end def create_dynamodb_store_uncached(opts = {}) - LaunchDarkly::Integrations::DynamoDB::new_feature_store($table_name, + LaunchDarkly::Integrations::DynamoDB::new_data_store($table_name, $ddb_base_opts.merge(opts).merge({ expiration: 0 })) end @@ -88,7 +88,7 @@ def create_test_client end -describe "DynamoDB feature store" do +describe "DynamoDB data store" do break if ENV['LD_SKIP_DATABASE_TESTS'] == '1' # These tests will all fail if there isn't a local DynamoDB instance running. @@ -96,10 +96,10 @@ def create_test_client create_table_if_necessary context "with local cache" do - include_examples "feature_store", method(:create_dynamodb_store), method(:clear_all_data) + include_examples "data_store", method(:create_dynamodb_store), method(:clear_all_data) end context "without local cache" do - include_examples "feature_store", method(:create_dynamodb_store_uncached), method(:clear_all_data) + include_examples "data_store", method(:create_dynamodb_store_uncached), method(:clear_all_data) end end diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 5ca9b9fd..e894077f 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -70,21 +70,21 @@ def event_processor end it "returns the value for an existing feature" do - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + config.data_store.init({ LaunchDarkly::FEATURES => {} }) + config.data_store.upsert(LaunchDarkly::FEATURES, feature_with_value) expect(client.variation("key", user, "default")).to eq "value" end it "returns the default value if a feature evaluates to nil" do empty_feature = { key: "key", on: false, offVariation: nil } - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, empty_feature) + config.data_store.init({ LaunchDarkly::FEATURES => {} }) + config.data_store.upsert(LaunchDarkly::FEATURES, empty_feature) expect(client.variation("key", user, "default")).to eq "default" end it "queues a feature request event for an existing feature" do - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + config.data_store.init({ LaunchDarkly::FEATURES => {} }) + 
config.data_store.upsert(LaunchDarkly::FEATURES, feature_with_value) expect(event_processor).to receive(:add_event).with(hash_including( kind: "feature", key: "key", @@ -100,8 +100,8 @@ def event_processor end it "queues a feature event for an existing feature when user is nil" do - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + config.data_store.init({ LaunchDarkly::FEATURES => {} }) + config.data_store.upsert(LaunchDarkly::FEATURES, feature_with_value) expect(event_processor).to receive(:add_event).with(hash_including( kind: "feature", key: "key", @@ -116,8 +116,8 @@ def event_processor end it "queues a feature event for an existing feature when user key is nil" do - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + config.data_store.init({ LaunchDarkly::FEATURES => {} }) + config.data_store.upsert(LaunchDarkly::FEATURES, feature_with_value) bad_user = { name: "Bob" } expect(event_processor).to receive(:add_event).with(hash_including( kind: "feature", @@ -147,8 +147,8 @@ def event_processor trackEvents: true ] } - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, flag) + config.data_store.init({ LaunchDarkly::FEATURES => {} }) + config.data_store.upsert(LaunchDarkly::FEATURES, flag) expect(event_processor).to receive(:add_event).with(hash_including( kind: 'feature', key: 'flag', @@ -172,8 +172,8 @@ def event_processor rules: [], trackEventsFallthrough: true } - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, flag) + config.data_store.init({ LaunchDarkly::FEATURES => {} }) + config.data_store.upsert(LaunchDarkly::FEATURES, flag) expect(event_processor).to receive(:add_event).with(hash_including( kind: 'feature', key: 'flag', @@ -215,8 +215,8 @@ def event_processor end it "returns a value for an existing feature" do - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + config.data_store.init({ LaunchDarkly::FEATURES => {} }) + config.data_store.upsert(LaunchDarkly::FEATURES, feature_with_value) result = client.variation_detail("key", user, "default") expected = LaunchDarkly::EvaluationDetail.new("value", 0, LaunchDarkly::EvaluationReason::off) expect(result).to eq expected @@ -224,8 +224,8 @@ def event_processor it "returns the default value if a feature evaluates to nil" do empty_feature = { key: "key", on: false, offVariation: nil } - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, empty_feature) + config.data_store.init({ LaunchDarkly::FEATURES => {} }) + config.data_store.upsert(LaunchDarkly::FEATURES, empty_feature) result = client.variation_detail("key", user, "default") expected = LaunchDarkly::EvaluationDetail.new("default", nil, LaunchDarkly::EvaluationReason::off) expect(result).to eq expected @@ -233,8 +233,8 @@ def event_processor end it "queues a feature request event for an existing feature" do - config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + config.data_store.init({ LaunchDarkly::FEATURES => {} }) + config.data_store.upsert(LaunchDarkly::FEATURES, feature_with_value) expect(event_processor).to receive(:add_event).with(hash_including( kind: 
"feature", key: "key", @@ -256,28 +256,28 @@ def event_processor let(:flag2) { { key: "key2", offVariation: 0, variations: [ 'value2' ] } } it "returns flag values" do - config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) result = client.all_flags({ key: 'userkey' }) expect(result).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) end it "returns empty map for nil user" do - config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) result = client.all_flags(nil) expect(result).to eq({}) end it "returns empty map for nil user key" do - config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) result = client.all_flags({}) expect(result).to eq({}) end it "returns empty map if offline" do - offline_config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + offline_config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) result = offline_client.all_flags(nil) expect(result).to eq({}) @@ -289,7 +289,7 @@ def event_processor let(:flag2) { { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true, debugEventsUntilDate: 1000 } } it "returns flags state" do - config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) state = client.all_flags_state({ key: 'userkey' }) expect(state.valid?).to be true @@ -322,7 +322,7 @@ def event_processor flag2 = { key: "server-side-2", offVariation: 0, variations: [ 'b' ], clientSide: false } flag3 = { key: "client-side-1", offVariation: 0, variations: [ 'value1' ], clientSide: true } flag4 = { key: "client-side-2", offVariation: 0, variations: [ 'value2' ], clientSide: true } - config.feature_store.init({ LaunchDarkly::FEATURES => { + config.data_store.init({ LaunchDarkly::FEATURES => { flag1[:key] => flag1, flag2[:key] => flag2, flag3[:key] => flag3, flag4[:key] => flag4 }}) @@ -339,7 +339,7 @@ def event_processor flag2 = { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true } flag3 = { key: "key3", version: 300, offVariation: 1, variations: [ 'x', 'value3' ], debugEventsUntilDate: future_time } - config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2, 'key3' => flag3 } }) + config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2, 'key3' => flag3 } }) state = client.all_flags_state({ key: 'userkey' }, { details_only_for_tracked_flags: true }) expect(state.valid?).to be true @@ -372,7 +372,7 @@ def event_processor end it "returns empty state for nil user" do - config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) state = client.all_flags_state(nil) expect(state.valid?).to be false @@ -380,7 +380,7 @@ def event_processor end it "returns empty state for nil user key" do - config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + config.data_store.init({ LaunchDarkly::FEATURES => 
{ 'key1' => flag1, 'key2' => flag2 } }) state = client.all_flags_state({}) expect(state.valid?).to be false @@ -388,7 +388,7 @@ def event_processor end it "returns empty state if offline" do - offline_config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + offline_config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) state = offline_client.all_flags_state({ key: 'userkey' }) expect(state.valid?).to be false @@ -472,7 +472,7 @@ def event_processor end end - describe "feature store data ordering" do + describe "data store data ordering" do let(:dependency_ordering_test_data) { { LaunchDarkly::FEATURES => { @@ -489,7 +489,7 @@ def event_processor } } - class FakeFeatureStore + class FakeDataStore attr_reader :received_data def init(all_data) @@ -518,11 +518,11 @@ def initialized? end end - it "passes data set to feature store in correct order on init" do - store = FakeFeatureStore.new - data_source_factory = lambda { |sdk_key, config| FakeUpdateProcessor.new(config.feature_store, + it "passes data set to data store in correct order on init" do + store = FakeDataStore.new + data_source_factory = lambda { |sdk_key, config| FakeUpdateProcessor.new(config.data_store, dependency_ordering_test_data) } - config = LaunchDarkly::Config.new(send_events: false, feature_store: store, data_source: data_source_factory) + config = LaunchDarkly::Config.new(send_events: false, data_store: store, data_source: data_source_factory) client = subject.new("secret", config) data = store.received_data diff --git a/spec/polling_spec.rb b/spec/polling_spec.rb index d4a1d9bc..6fbaa6c9 100644 --- a/spec/polling_spec.rb +++ b/spec/polling_spec.rb @@ -6,7 +6,7 @@ let(:requestor) { double() } def with_processor(store) - config = LaunchDarkly::Config.new(feature_store: store) + config = LaunchDarkly::Config.new(data_store: store) processor = subject.new(config, requestor) begin yield processor @@ -29,7 +29,7 @@ def with_processor(store) it 'puts feature data in store' do allow(requestor).to receive(:request_all_data).and_return(all_data) - store = LaunchDarkly::InMemoryFeatureStore.new + store = LaunchDarkly::InMemoryDataStore.new with_processor(store) do |processor| ready = processor.start ready.wait @@ -40,7 +40,7 @@ def with_processor(store) it 'sets initialized to true' do allow(requestor).to receive(:request_all_data).and_return(all_data) - store = LaunchDarkly::InMemoryFeatureStore.new + store = LaunchDarkly::InMemoryDataStore.new with_processor(store) do |processor| ready = processor.start ready.wait @@ -53,7 +53,7 @@ def with_processor(store) describe 'connection error' do it 'does not cause immediate failure, does not set initialized' do allow(requestor).to receive(:request_all_data).and_raise(StandardError.new("test error")) - store = LaunchDarkly::InMemoryFeatureStore.new + store = LaunchDarkly::InMemoryDataStore.new with_processor(store) do |processor| ready = processor.start finished = ready.wait(0.2) @@ -67,7 +67,7 @@ def with_processor(store) describe 'HTTP errors' do def verify_unrecoverable_http_error(status) allow(requestor).to receive(:request_all_data).and_raise(LaunchDarkly::UnexpectedResponseError.new(status)) - with_processor(LaunchDarkly::InMemoryFeatureStore.new) do |processor| + with_processor(LaunchDarkly::InMemoryDataStore.new) do |processor| ready = processor.start finished = ready.wait(0.2) expect(finished).to be true @@ -77,7 +77,7 @@ def verify_unrecoverable_http_error(status) def verify_recoverable_http_error(status) 
allow(requestor).to receive(:request_all_data).and_raise(LaunchDarkly::UnexpectedResponseError.new(status)) - with_processor(LaunchDarkly::InMemoryFeatureStore.new) do |processor| + with_processor(LaunchDarkly::InMemoryDataStore.new) do |processor| ready = processor.start finished = ready.wait(0.2) expect(finished).to be false @@ -108,7 +108,7 @@ def verify_recoverable_http_error(status) describe 'stop' do it 'stops promptly rather than continuing to wait for poll interval' do - with_processor(LaunchDarkly::InMemoryFeatureStore.new) do |processor| + with_processor(LaunchDarkly::InMemoryDataStore.new) do |processor| sleep(1) # somewhat arbitrary, but should ensure that it has started polling start_time = Time.now processor.stop diff --git a/spec/redis_feature_store_spec.rb b/spec/redis_feature_store_spec.rb index 5aec6658..6824b60b 100644 --- a/spec/redis_feature_store_spec.rb +++ b/spec/redis_feature_store_spec.rb @@ -1,4 +1,4 @@ -require "feature_store_spec_base" +require "data_store_spec_base" require "json" require "redis" require "spec_helper" @@ -15,11 +15,11 @@ } def create_redis_store(opts = {}) - LaunchDarkly::RedisFeatureStore.new($base_opts.merge(opts).merge({ expiration: 60 })) + LaunchDarkly::RedisDataStore.new($base_opts.merge(opts).merge({ expiration: 60 })) end def create_redis_store_uncached(opts = {}) - LaunchDarkly::RedisFeatureStore.new($base_opts.merge(opts).merge({ expiration: 0 })) + LaunchDarkly::RedisDataStore.new($base_opts.merge(opts).merge({ expiration: 0 })) end def clear_all_data @@ -28,19 +28,19 @@ def clear_all_data end -describe LaunchDarkly::RedisFeatureStore do - subject { LaunchDarkly::RedisFeatureStore } +describe LaunchDarkly::RedisDataStore do + subject { LaunchDarkly::RedisDataStore } break if ENV['LD_SKIP_DATABASE_TESTS'] == '1' # These tests will all fail if there isn't a Redis instance running on the default port. 
context "real Redis with local cache" do - include_examples "feature_store", method(:create_redis_store), method(:clear_all_data) + include_examples "data_store", method(:create_redis_store), method(:clear_all_data) end context "real Redis without local cache" do - include_examples "feature_store", method(:create_redis_store_uncached), method(:clear_all_data) + include_examples "data_store", method(:create_redis_store_uncached), method(:clear_all_data) end def make_concurrent_modifier_test_hook(other_client, flag, start_version, end_version) diff --git a/spec/stream_spec.rb b/spec/stream_spec.rb index 648833ff..aa97cbf9 100644 --- a/spec/stream_spec.rb +++ b/spec/stream_spec.rb @@ -18,38 +18,38 @@ it "will accept PUT methods" do processor.send(:process_message, put_message) - expect(config.feature_store.get(LaunchDarkly::FEATURES, "asdf")).to eq(key: "asdf") - expect(config.feature_store.get(LaunchDarkly::SEGMENTS, "segkey")).to eq(key: "segkey") + expect(config.data_store.get(LaunchDarkly::FEATURES, "asdf")).to eq(key: "asdf") + expect(config.data_store.get(LaunchDarkly::SEGMENTS, "segkey")).to eq(key: "segkey") end it "will accept PATCH methods for flags" do processor.send(:process_message, patch_flag_message) - expect(config.feature_store.get(LaunchDarkly::FEATURES, "asdf")).to eq(key: "asdf", version: 1) + expect(config.data_store.get(LaunchDarkly::FEATURES, "asdf")).to eq(key: "asdf", version: 1) end it "will accept PATCH methods for segments" do processor.send(:process_message, patch_seg_message) - expect(config.feature_store.get(LaunchDarkly::SEGMENTS, "asdf")).to eq(key: "asdf", version: 1) + expect(config.data_store.get(LaunchDarkly::SEGMENTS, "asdf")).to eq(key: "asdf", version: 1) end it "will accept DELETE methods for flags" do processor.send(:process_message, patch_flag_message) processor.send(:process_message, delete_flag_message) - expect(config.feature_store.get(LaunchDarkly::FEATURES, "key")).to eq(nil) + expect(config.data_store.get(LaunchDarkly::FEATURES, "key")).to eq(nil) end it "will accept DELETE methods for segments" do processor.send(:process_message, patch_seg_message) processor.send(:process_message, delete_seg_message) - expect(config.feature_store.get(LaunchDarkly::SEGMENTS, "key")).to eq(nil) + expect(config.data_store.get(LaunchDarkly::SEGMENTS, "key")).to eq(nil) end it "will accept INDIRECT PATCH method for flags" do flag = { key: 'key', version: 1 } allow(requestor).to receive(:request_flag).with(flag[:key]).and_return(flag) processor.send(:process_message, indirect_patch_flag_message); - expect(config.feature_store.get(LaunchDarkly::FEATURES, flag[:key])).to eq(flag) + expect(config.data_store.get(LaunchDarkly::FEATURES, flag[:key])).to eq(flag) end it "will accept INDIRECT PATCH method for segments" do segment = { key: 'key', version: 1 } allow(requestor).to receive(:request_segment).with(segment[:key]).and_return(segment) processor.send(:process_message, indirect_patch_segment_message); - expect(config.feature_store.get(LaunchDarkly::SEGMENTS, segment[:key])).to eq(segment) + expect(config.data_store.get(LaunchDarkly::SEGMENTS, segment[:key])).to eq(segment) end it "will log a warning if the method is not recognized" do expect(processor.instance_variable_get(:@config).logger).to receive :warn From 38493b952d34efc893a021f4d201a2220282fd18 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 3 Jan 2020 11:50:52 -0800 Subject: [PATCH 146/182] remove references to UpdateProcessor (now DataSource) --- lib/ldclient-rb/config.rb | 12 +----------- 
lib/ldclient-rb/ldclient.rb | 6 +++--- spec/ldclient_spec.rb | 6 +++--- 3 files changed, 7 insertions(+), 17 deletions(-) diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index 935abdad..560896b9 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -35,8 +35,6 @@ class Config # @option opts [Float] :user_keys_flush_interval (300) See {#user_keys_flush_interval}. # @option opts [Boolean] :inline_users_in_events (false) See {#inline_users_in_events}. # @option opts [Object] :data_source See {#data_source}. - # @option opts [Object] :update_processor Obsolete synonym for `data_source`. - # @option opts [Object] :update_processor_factory Obsolete synonym for `data_source`. # def initialize(opts = {}) @base_uri = (opts[:base_uri] || Config.default_base_uri).chomp("/") @@ -59,9 +57,7 @@ def initialize(opts = {}) @user_keys_capacity = opts[:user_keys_capacity] || Config.default_user_keys_capacity @user_keys_flush_interval = opts[:user_keys_flush_interval] || Config.default_user_keys_flush_interval @inline_users_in_events = opts[:inline_users_in_events] || false - @data_source = opts[:data_source] || opts[:update_processor] || opts[:update_processor_factory] - @update_processor = opts[:update_processor] - @update_processor_factory = opts[:update_processor_factory] + @data_source = opts[:data_source] end # @@ -251,12 +247,6 @@ def offline? # attr_reader :data_source - # @deprecated This is replaced by {#data_source}. - attr_reader :update_processor - - # @deprecated This is replaced by {#data_source}. - attr_reader :update_processor_factory - # # The default LaunchDarkly client configuration. This configuration sets # reasonable defaults for most users. diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index eed490a9..fd42b364 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -58,7 +58,7 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) if @config.use_ldd? @config.logger.info { "[LDClient] Started LaunchDarkly Client in LDD mode" } - return # requestor and update processor are not used in this mode + return # requestor and data processor are not used in this mode end data_source_or_factory = @config.data_source || self.method(:create_default_data_source) @@ -342,7 +342,7 @@ def close def create_default_data_source(sdk_key, config) if config.offline? - return NullUpdateProcessor.new + return NullDataSource.new end requestor = Requestor.new(sdk_key, config) if config.stream? @@ -419,7 +419,7 @@ def sanitize_user(user) # Used internally when the client is offline. # @private # - class NullUpdateProcessor + class NullDataSource def start e = Concurrent::Event.new e.set diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index e894077f..e1379dc4 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -7,7 +7,7 @@ let(:offline_client) do subject.new("secret", offline_config) end - let(:null_data) { LaunchDarkly::NullUpdateProcessor.new } + let(:null_data) { LaunchDarkly::NullDataSource.new } let(:logger) { double().as_null_object } let(:config) { LaunchDarkly::Config.new({ send_events: false, data_source: null_data, logger: logger }) } let(:client) do @@ -497,7 +497,7 @@ def init(all_data) end end - class FakeUpdateProcessor + class FakeDataSource def initialize(store, data) @store = store @data = data @@ -520,7 +520,7 @@ def initialized? 
it "passes data set to data store in correct order on init" do store = FakeDataStore.new - data_source_factory = lambda { |sdk_key, config| FakeUpdateProcessor.new(config.data_store, + data_source_factory = lambda { |sdk_key, config| FakeDataSource.new(config.data_store, dependency_ordering_test_data) } config = LaunchDarkly::Config.new(send_events: false, data_store: store, data_source: data_source_factory) client = subject.new("secret", config) From 1cfcd527c38b7eca57fe9f52b88e41316efd2836 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 14 Jan 2020 16:32:01 -0800 Subject: [PATCH 147/182] add event payload ID header --- lib/ldclient-rb/events.rb | 3 +++ spec/events_spec.rb | 34 ++++++++++++++++++++++++++++++++-- 2 files changed, 35 insertions(+), 2 deletions(-) diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index 184facc4..bb12f6ec 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -1,6 +1,7 @@ require "concurrent" require "concurrent/atomics" require "concurrent/executors" +require "securerandom" require "thread" require "time" @@ -359,6 +360,7 @@ def run(sdk_key, config, client, payload, formatter) events_out = formatter.make_output_events(payload.events, payload.summary) res = nil body = events_out.to_json + payload_id = SecureRandom.uuid (0..1).each do |attempt| if attempt > 0 config.logger.warn { "[LDClient] Will retry posting events after 1 second" } @@ -374,6 +376,7 @@ def run(sdk_key, config, client, payload, formatter) req["Authorization"] = sdk_key req["User-Agent"] = "RubyClient/" + LaunchDarkly::VERSION req["X-LaunchDarkly-Event-Schema"] = CURRENT_SCHEMA_VERSION.to_s + req["X-LaunchDarkly-Payload-ID"] = payload_id req["Connection"] = "keep-alive" res = client.request(req) rescue StandardError => exn diff --git a/spec/events_spec.rb b/spec/events_spec.rb index 16bee286..1108a3ac 100644 --- a/spec/events_spec.rb +++ b/spec/events_spec.rb @@ -416,6 +416,29 @@ expect(hc.get_request["authorization"]).to eq "sdk_key" end + it "sends unique payload IDs" do + @ep = subject.new("sdk_key", default_config, hc) + e = { kind: "identify", user: user } + + @ep.add_event(e) + @ep.flush + @ep.wait_until_inactive + req0 = hc.get_request + + @ep.add_event(e) + @ep.flush + @ep.wait_until_inactive + req1 = hc.get_request + + id0 = req0["x-launchdarkly-payload-id"] + id1 = req1["x-launchdarkly-payload-id"] + expect(id0).not_to be_nil + expect(id0).not_to eq "" + expect(id1).not_to be nil + expect(id1).not_to eq "" + expect(id1).not_to eq id0 + end + def verify_unrecoverable_http_error(status) @ep = subject.new("sdk_key", default_config, hc) e = { kind: "identify", user: user } @@ -442,8 +465,15 @@ def verify_recoverable_http_error(status) @ep.flush @ep.wait_until_inactive - expect(hc.get_request).not_to be_nil - expect(hc.get_request).not_to be_nil + req0 = hc.get_request + expect(req0).not_to be_nil + req1 = hc.get_request + expect(req1).not_to be_nil + id0 = req0["x-launchdarkly-payload-id"] + expect(id0).not_to be_nil + expect(id0).not_to eq "" + expect(req1["x-launchdarkly-payload-id"]).to eq id0 + expect(hc.get_request).to be_nil # no 3rd request # now verify that a subsequent flush still generates a request From 9865a9847aa0405c2bd6b51ab9c8890ab8634a28 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 17 Jan 2020 18:40:56 -0800 Subject: [PATCH 148/182] (6.0) drop support for old Ruby versions --- .circleci/config.yml | 89 +++++--------------------------------------- README.md | 2 +- 2 files changed, 11 insertions(+), 80 deletions(-) diff 
--git a/.circleci/config.yml b/.circleci/config.yml index c6ff6938..f976071f 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -4,12 +4,10 @@ workflows: version: 2 test: jobs: - - test-misc-rubies - - test-2.2 - - test-2.3 - test-2.4 - test-2.5 - test-2.6 + - test-2.7 - test-jruby-9.2 ruby-docker-template: &ruby-docker-template @@ -30,105 +28,38 @@ ruby-docker-template: &ruby-docker-template path: ./rspec jobs: - test-2.2: - <<: *ruby-docker-template - docker: - - image: circleci/ruby:2.2.10-jessie - - image: consul - - image: redis - - image: amazon/dynamodb-local - test-2.3: + test-2.4: <<: *ruby-docker-template docker: - - image: circleci/ruby:2.3.7-jessie + - image: circleci/ruby:2.4 - image: consul - image: redis - image: amazon/dynamodb-local - test-2.4: + test-2.5: <<: *ruby-docker-template docker: - - image: circleci/ruby:2.4.5-stretch + - image: circleci/ruby:2.5 - image: consul - image: redis - image: amazon/dynamodb-local - test-2.5: + test-2.6: <<: *ruby-docker-template docker: - - image: circleci/ruby:2.5.3-stretch + - image: circleci/ruby:2.6 - image: consul - image: redis - image: amazon/dynamodb-local - test-2.6: + test-2.7: <<: *ruby-docker-template docker: - - image: circleci/ruby:2.6.2-stretch + - image: circleci/ruby:2.7 - image: consul - image: redis - image: amazon/dynamodb-local test-jruby-9.2: <<: *ruby-docker-template docker: - - image: circleci/jruby:9-jdk + - image: circleci/jruby:9.2-jdk - image: consul - image: redis - image: amazon/dynamodb-local - - # The following very slow job uses an Ubuntu container to run the Ruby versions that - # CircleCI doesn't provide Docker images for. - test-misc-rubies: - machine: - image: circleci/classic:latest - environment: - - RUBIES: "jruby-9.1.17.0" - steps: - - run: sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" - - run: sudo apt-get -q update - - run: sudo apt-get -qy install redis-server - - run: sudo apt-cache policy docker-ce - - run: sudo apt-get -qy install docker-ce - - checkout - - run: - name: install all Ruby versions - command: "parallel rvm install ::: $RUBIES" - - run: - name: bundle install for all versions - shell: /bin/bash -leo pipefail # need -l in order for "rvm use" to work - command: | - set -e; - for i in $RUBIES; - do - rvm use $i; - if [[ $i == jruby* ]]; then - gem install jruby-openssl; # required by bundler, no effect on Ruby MRI - fi - # bundler 2.0 may be preinstalled, we need to remove it if so - yes | gem uninstall bundler --version '>=2.0' || true; - gem install bundler -v 1.17.3; - bundle install; - mv Gemfile.lock "Gemfile.lock.$i" - done - - run: - name: start DynamoDB - command: docker run -p 8000:8000 amazon/dynamodb-local - background: true - - run: - name: download Consul - command: wget https://releases.hashicorp.com/consul/0.8.0/consul_0.8.0_linux_amd64.zip - - run: - name: extract Consul - command: unzip consul_0.8.0_linux_amd64.zip - - run: - name: start Consul - command: ./consul agent -dev - background: true - - run: - name: run tests for all versions - shell: /bin/bash -leo pipefail - command: | - set -e; - for i in $RUBIES; - do - rvm use $i; - cp "Gemfile.lock.$i" Gemfile.lock; - bundle exec rspec spec; - done diff --git a/README.md b/README.md index d3f99b69..c6a6adfc 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ LaunchDarkly overview Supported Ruby versions ----------------------- -This version of the LaunchDarkly SDK has a minimum Ruby version of 2.2.6, or 9.1.6 for JRuby. 
+This version of the LaunchDarkly SDK has a minimum Ruby version of 2.3.0, or 9.2.0 for JRuby. Getting started ----------- From bb0d3b1dfffe892bc6d58f2ab072d5e21ba331b3 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 22 Jan 2020 17:38:45 -0800 Subject: [PATCH 149/182] add Ruby version constraint to gemspec --- launchdarkly-server-sdk.gemspec | 1 + 1 file changed, 1 insertion(+) diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index 2e95cd41..264cf16f 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -20,6 +20,7 @@ Gem::Specification.new do |spec| spec.test_files = spec.files.grep(%r{^(test|spec|features)/}) spec.require_paths = ["lib"] spec.extensions = 'ext/mkrf_conf.rb' + spec.required_ruby_version = ">= 2.4.0" spec.add_development_dependency "aws-sdk-dynamodb", "~> 1.18" spec.add_development_dependency "bundler", "~> 1.7" From 4aaf75eb7869cd4f6c6db06b571e1d687aef81da Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 9 Mar 2020 16:45:49 -0700 Subject: [PATCH 150/182] remove Rake dependency --- Gemfile.lock | 2 -- Rakefile | 5 ----- launchdarkly-server-sdk.gemspec | 1 - 3 files changed, 8 deletions(-) delete mode 100644 Rakefile diff --git a/Gemfile.lock b/Gemfile.lock index 5bd07e9c..8ae43040 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -49,7 +49,6 @@ GEM rb-inotify (~> 0.9, >= 0.9.7) ruby_dep (~> 1.2) multipart-post (2.0.0) - rake (10.5.0) rb-fsevent (0.10.3) rb-inotify (0.9.10) ffi (>= 0.5.0, < 2) @@ -92,7 +91,6 @@ DEPENDENCIES diplomat (>= 2.0.2) launchdarkly-server-sdk! listen (~> 3.0) - rake (~> 10.0) redis (~> 3.3.5) rspec (~> 3.2) rspec_junit_formatter (~> 0.3.0) diff --git a/Rakefile b/Rakefile deleted file mode 100644 index fd36e8a5..00000000 --- a/Rakefile +++ /dev/null @@ -1,5 +0,0 @@ -require "bundler/gem_tasks" - -require "rspec/core/rake_task" -RSpec::Core::RakeTask.new(:spec) -task default: :spec diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index 2e95cd41..eaf52cd5 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -28,7 +28,6 @@ Gem::Specification.new do |spec| spec.add_development_dependency "diplomat", ">= 2.0.2" spec.add_development_dependency "redis", "~> 3.3.5" spec.add_development_dependency "connection_pool", ">= 2.1.2" - spec.add_development_dependency "rake", "~> 10.0" spec.add_development_dependency "rspec_junit_formatter", "~> 0.3.0" spec.add_development_dependency "timecop", "~> 0.9.1" spec.add_development_dependency "listen", "~> 3.0" # see file_data_source.rb From 441a1953c9af36d682c79e29ae87aa3e83f3d923 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 10 Mar 2020 10:50:04 -0700 Subject: [PATCH 151/182] update ld-eventsource to 1.0.2 which doesn't have Rake dependency --- Gemfile.lock | 6 +++--- launchdarkly-server-sdk.gemspec | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index 8ae43040..77a3bf7d 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -4,7 +4,7 @@ PATH launchdarkly-server-sdk (5.6.2) concurrent-ruby (~> 1.0) json (>= 1.8, < 3) - ld-eventsource (= 1.0.1) + ld-eventsource (= 1.0.2) semantic (~> 1.6) GEM @@ -23,7 +23,7 @@ GEM aws-sigv4 (1.0.3) codeclimate-test-reporter (0.6.0) simplecov (>= 0.7.1, < 1.0.0) - concurrent-ruby (1.1.5) + concurrent-ruby (1.1.6) connection_pool (2.2.1) diff-lcs (1.3) diplomat (2.0.2) @@ -40,7 +40,7 @@ GEM jmespath (1.4.0) json (1.8.6) json (1.8.6-java) - ld-eventsource (1.0.1) + ld-eventsource (1.0.2) concurrent-ruby (~> 1.0) 
http_tools (~> 0.4.5) socketry (~> 0.5.1) diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index eaf52cd5..f69c74fa 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -35,5 +35,5 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "json", [">= 1.8", "< 3"] spec.add_runtime_dependency "semantic", "~> 1.6" spec.add_runtime_dependency "concurrent-ruby", "~> 1.0" - spec.add_runtime_dependency "ld-eventsource", "1.0.1" + spec.add_runtime_dependency "ld-eventsource", "1.0.2" end From 602c5e6dc3b41f2dbae982913a39255d90e9101d Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 10 Mar 2020 13:01:45 -0700 Subject: [PATCH 152/182] implement diagnostic events in Ruby (#130) --- lib/ldclient-rb/config.rb | 64 ++ lib/ldclient-rb/events.rb | 180 ++-- lib/ldclient-rb/flags_state.rb | 2 +- lib/ldclient-rb/impl/diagnostic_events.rb | 130 +++ lib/ldclient-rb/impl/event_sender.rb | 72 ++ lib/ldclient-rb/impl/util.rb | 19 + lib/ldclient-rb/ldclient.rb | 21 +- lib/ldclient-rb/requestor.rb | 3 +- lib/ldclient-rb/stream.rb | 23 +- spec/diagnostic_events_spec.rb | 163 +++ spec/evaluation_spec.rb | 2 +- spec/event_sender_spec.rb | 179 ++++ spec/events_spec.rb | 961 ++++++++---------- spec/file_data_source_spec.rb | 2 +- spec/http_util.rb | 17 +- .../integrations/consul_feature_store_spec.rb | 2 - .../dynamodb_feature_store_spec.rb | 2 - spec/ldclient_spec.rb | 2 +- spec/polling_spec.rb | 2 +- spec/redis_feature_store_spec.rb | 3 - spec/requestor_spec.rb | 24 +- spec/spec_helper.rb | 3 + 22 files changed, 1237 insertions(+), 639 deletions(-) create mode 100644 lib/ldclient-rb/impl/diagnostic_events.rb create mode 100644 lib/ldclient-rb/impl/event_sender.rb create mode 100644 lib/ldclient-rb/impl/util.rb create mode 100644 spec/diagnostic_events_spec.rb create mode 100644 spec/event_sender_spec.rb diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index c7c42e56..f3612756 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -37,6 +37,10 @@ class Config # @option opts [Object] :data_source See {#data_source}. # @option opts [Object] :update_processor Obsolete synonym for `data_source`. # @option opts [Object] :update_processor_factory Obsolete synonym for `data_source`. + # @option opts [Boolean] :diagnostic_opt_out (false) See {#diagnostic_opt_out?}. + # @option opts [Float] :diagnostic_recording_interval (900) See {#diagnostic_recording_interval}. + # @option opts [String] :wrapper_name See {#wrapper_name}. + # @option opts [String] :wrapper_version See {#wrapper_version}. # def initialize(opts = {}) @base_uri = (opts[:base_uri] || Config.default_base_uri).chomp("/") @@ -62,6 +66,11 @@ def initialize(opts = {}) @data_source = opts[:data_source] || opts[:update_processor] || opts[:update_processor_factory] @update_processor = opts[:update_processor] @update_processor_factory = opts[:update_processor_factory] + @diagnostic_opt_out = opts.has_key?(:diagnostic_opt_out) && opts[:diagnostic_opt_out] + @diagnostic_recording_interval = opts.has_key?(:diagnostic_recording_interval) && opts[:diagnostic_recording_interval] > Config.minimum_diagnostic_recording_interval ? + opts[:diagnostic_recording_interval] : Config.default_diagnostic_recording_interval + @wrapper_name = opts[:wrapper_name] + @wrapper_version = opts[:wrapper_version] end # @@ -257,6 +266,45 @@ def offline? # @deprecated This is replaced by {#data_source}. 
attr_reader :update_processor_factory + # + # Set to true to opt out of sending diagnostics data. + # + # Unless `diagnostic_opt_out` is set to true, the client will send some diagnostics data to the LaunchDarkly servers + # in order to assist in the development of future SDK improvements. These diagnostics consist of an initial payload + # containing some details of the SDK in use, the SDK's configuration, and the platform the SDK is being run on, as + # well as periodic information on irregular occurrences such as dropped events. + # @return [Boolean] + # + def diagnostic_opt_out? + @diagnostic_opt_out + end + + # + # The interval at which periodic diagnostic data is sent, in seconds. + # + # The default is 900 (every 15 minutes) and the minimum value is 60 (every minute). + # @return [Float] + # + attr_reader :diagnostic_recording_interval + + # + # For use by wrapper libraries to set an identifying name for the wrapper being used. + # + # This will be sent in User-Agent headers during requests to the LaunchDarkly servers to allow recording + # metrics on the usage of these wrapper libraries. + # @return [String] + # + attr_reader :wrapper_name + + # + # For use by wrapper libraries to report the version of the library in use. + # + # If `wrapper_name` is not set, this field will be ignored. Otherwise the version string will be included in + # the User-Agent headers along with the `wrapper_name` during requests to the LaunchDarkly servers. + # @return [String] + # + attr_reader :wrapper_version + # # The default LaunchDarkly client configuration. This configuration sets # reasonable defaults for most users. @@ -407,5 +455,21 @@ def self.default_user_keys_capacity def self.default_user_keys_flush_interval 300 end + + # + # The default value for {#diagnostic_recording_interval}. + # @return [Float] 900 + # + def self.default_diagnostic_recording_interval + 900 + end + + # + # The minimum value for {#diagnostic_recording_interval}. + # @return [Float] 60 + # + def self.minimum_diagnostic_recording_interval + 60 + end end end diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index bb12f6ec..9313b670 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -1,7 +1,10 @@ +require "ldclient-rb/impl/diagnostic_events" +require "ldclient-rb/impl/event_sender" +require "ldclient-rb/impl/util" + require "concurrent" require "concurrent/atomics" require "concurrent/executors" -require "securerandom" require "thread" require "time" @@ -24,12 +27,10 @@ module LaunchDarkly MAX_FLUSH_WORKERS = 5 - CURRENT_SCHEMA_VERSION = 3 USER_ATTRS_TO_STRINGIFY_FOR_EVENTS = [ :key, :secondary, :ip, :country, :email, :firstName, :lastName, :avatar, :name ] private_constant :MAX_FLUSH_WORKERS - private_constant :CURRENT_SCHEMA_VERSION private_constant :USER_ATTRS_TO_STRINGIFY_FOR_EVENTS # @private @@ -60,6 +61,10 @@ class FlushMessage class FlushUsersMessage end + # @private + class DiagnosticEventMessage + end + # @private class SynchronousMessage def initialize @@ -85,9 +90,9 @@ class StopMessage < SynchronousMessage # @private class EventProcessor - def initialize(sdk_key, config, client = nil) + def initialize(sdk_key, config, client = nil, diagnostic_accumulator = nil, test_properties = nil) @logger = config.logger - @inbox = SizedQueue.new(config.capacity) + @inbox = SizedQueue.new(config.capacity < 100 ? 
100 : config.capacity) @flush_task = Concurrent::TimerTask.new(execution_interval: config.flush_interval) do post_to_inbox(FlushMessage.new) end @@ -96,14 +101,29 @@ def initialize(sdk_key, config, client = nil) post_to_inbox(FlushUsersMessage.new) end @users_flush_task.execute + if !diagnostic_accumulator.nil? + interval = test_properties && test_properties.has_key?(:diagnostic_recording_interval) ? + test_properties[:diagnostic_recording_interval] : + config.diagnostic_recording_interval + @diagnostic_event_task = Concurrent::TimerTask.new(execution_interval: interval) do + post_to_inbox(DiagnosticEventMessage.new) + end + @diagnostic_event_task.execute + else + @diagnostic_event_task = nil + end @stopped = Concurrent::AtomicBoolean.new(false) @inbox_full = Concurrent::AtomicBoolean.new(false) - EventDispatcher.new(@inbox, sdk_key, config, client) + event_sender = test_properties && test_properties.has_key?(:event_sender) ? + test_properties[:event_sender] : + Impl::EventSender.new(sdk_key, config, client ? client : Util.new_http_client(config.events_uri, config)) + + EventDispatcher.new(@inbox, sdk_key, config, diagnostic_accumulator, event_sender) end def add_event(event) - event[:creationDate] = (Time.now.to_f * 1000).to_i + event[:creationDate] = Impl::Util.current_time_millis post_to_inbox(EventMessage.new(event)) end @@ -117,6 +137,7 @@ def stop if @stopped.make_true @flush_task.shutdown @users_flush_task.shutdown + @diagnostic_event_task.shutdown if !@diagnostic_event_task.nil? # Note that here we are not calling post_to_inbox, because we *do* want to wait if the inbox # is full; an orderly shutdown can't happen unless these messages are received. @inbox << FlushMessage.new @@ -152,34 +173,36 @@ def post_to_inbox(message) # @private class EventDispatcher - def initialize(inbox, sdk_key, config, client) + def initialize(inbox, sdk_key, config, diagnostic_accumulator, event_sender) @sdk_key = sdk_key @config = config - - if client - @client = client - else - @client = Util.new_http_client(@config.events_uri, @config) - end + @diagnostic_accumulator = config.diagnostic_opt_out? ? nil : diagnostic_accumulator + @event_sender = event_sender @user_keys = SimpleLRUCacheSet.new(config.user_keys_capacity) @formatter = EventOutputFormatter.new(config) @disabled = Concurrent::AtomicBoolean.new(false) @last_known_past_time = Concurrent::AtomicReference.new(0) - + @deduplicated_users = 0 + @events_in_last_batch = 0 + outbox = EventBuffer.new(config.capacity, config.logger) flush_workers = NonBlockingThreadPool.new(MAX_FLUSH_WORKERS) - Thread.new { main_loop(inbox, outbox, flush_workers) } + if !@diagnostic_accumulator.nil? 
+ diagnostic_event_workers = NonBlockingThreadPool.new(1) + init_event = @diagnostic_accumulator.create_init_event(config) + send_diagnostic_event(init_event, diagnostic_event_workers) + else + diagnostic_event_workers = nil + end + + Thread.new { main_loop(inbox, outbox, flush_workers, diagnostic_event_workers) } end private - def now_millis() - (Time.now.to_f * 1000).to_i - end - - def main_loop(inbox, outbox, flush_workers) + def main_loop(inbox, outbox, flush_workers, diagnostic_event_workers) running = true while running do begin @@ -191,11 +214,13 @@ def main_loop(inbox, outbox, flush_workers) trigger_flush(outbox, flush_workers) when FlushUsersMessage @user_keys.clear + when DiagnosticEventMessage + send_and_reset_diagnostics(outbox, diagnostic_event_workers) when TestSyncMessage - synchronize_for_testing(flush_workers) + synchronize_for_testing(flush_workers, diagnostic_event_workers) message.completed when StopMessage - do_shutdown(flush_workers) + do_shutdown(flush_workers, diagnostic_event_workers) running = false message.completed end @@ -205,18 +230,23 @@ def main_loop(inbox, outbox, flush_workers) end end - def do_shutdown(flush_workers) + def do_shutdown(flush_workers, diagnostic_event_workers) flush_workers.shutdown flush_workers.wait_for_termination + if !diagnostic_event_workers.nil? + diagnostic_event_workers.shutdown + diagnostic_event_workers.wait_for_termination + end begin @client.finish rescue end end - def synchronize_for_testing(flush_workers) + def synchronize_for_testing(flush_workers, diagnostic_event_workers) # Used only by unit tests. Wait until all active flush workers have finished. flush_workers.wait_all + diagnostic_event_workers.wait_all if !diagnostic_event_workers.nil? end def dispatch_event(event, outbox) @@ -260,7 +290,9 @@ def notice_user(user) if user.nil? || !user.has_key?(:key) true else - @user_keys.add(user[:key].to_s) + known = @user_keys.add(user[:key].to_s) + @deduplicated_users += 1 if known + known end end @@ -268,7 +300,7 @@ def should_debug_event(event) debug_until = event[:debugEventsUntilDate] if !debug_until.nil? last_past = @last_known_past_time.value - debug_until > last_past && debug_until > now_millis + debug_until > last_past && debug_until > Impl::Util.current_time_millis else false end @@ -281,34 +313,44 @@ def trigger_flush(outbox, flush_workers) payload = outbox.get_payload if !payload.events.empty? || !payload.summary.counters.empty? + count = payload.events.length + (payload.summary.counters.empty? ? 0 : 1) + @events_in_last_batch = count # If all available worker threads are busy, success will be false and no job will be queued. success = flush_workers.post do begin - resp = EventPayloadSendTask.new.run(@sdk_key, @config, @client, payload, @formatter) - handle_response(resp) if !resp.nil? + events_out = @formatter.make_output_events(payload.events, payload.summary) + result = @event_sender.send_event_data(events_out.to_json, false) + @disabled.value = true if result.must_shutdown + if !result.time_from_server.nil? 
+ @last_known_past_time.value = (result.time_from_server.to_f * 1000).to_i + end rescue => e Util.log_exception(@config.logger, "Unexpected error in event processor", e) end end outbox.clear if success # Reset our internal state, these events now belong to the flush worker + else + @events_in_last_batch = 0 end end - def handle_response(res) - status = res.code.to_i - if status >= 400 - message = Util.http_error_message(status, "event delivery", "some events were dropped") - @config.logger.error { "[LDClient] #{message}" } - if !Util.http_error_recoverable?(status) - @disabled.value = true - end - else - if !res["date"].nil? - begin - res_time = (Time.httpdate(res["date"]).to_f * 1000).to_i - @last_known_past_time.value = res_time - rescue ArgumentError - end + def send_and_reset_diagnostics(outbox, diagnostic_event_workers) + return if @diagnostic_accumulator.nil? + dropped_count = outbox.get_and_clear_dropped_count + event = @diagnostic_accumulator.create_periodic_event_and_reset(dropped_count, @deduplicated_users, @events_in_last_batch) + @deduplicated_users = 0 + @events_in_last_batch = 0 + send_diagnostic_event(event, diagnostic_event_workers) + end + + def send_diagnostic_event(event, diagnostic_event_workers) + return if diagnostic_event_workers.nil? + uri = URI(@config.events_uri + "/diagnostic") + diagnostic_event_workers.post do + begin + @event_sender.send_event_data(event.to_json, true) + rescue => e + Util.log_exception(@config.logger, "Unexpected error in event processor", e) end end end @@ -323,6 +365,7 @@ def initialize(capacity, logger) @capacity = capacity @logger = logger @capacity_exceeded = false + @dropped_events = 0 @events = [] @summarizer = EventSummarizer.new end @@ -333,6 +376,7 @@ def add_event(event) @events.push(event) @capacity_exceeded = false else + @dropped_events += 1 if !@capacity_exceeded @capacity_exceeded = true @logger.warn { "[LDClient] Exceeded event queue capacity. Increase capacity to avoid dropping events." } @@ -348,54 +392,18 @@ def get_payload return FlushPayload.new(@events, @summarizer.snapshot) end + def get_and_clear_dropped_count + ret = @dropped_events + @dropped_events = 0 + ret + end + def clear @events = [] @summarizer.clear end end - # @private - class EventPayloadSendTask - def run(sdk_key, config, client, payload, formatter) - events_out = formatter.make_output_events(payload.events, payload.summary) - res = nil - body = events_out.to_json - payload_id = SecureRandom.uuid - (0..1).each do |attempt| - if attempt > 0 - config.logger.warn { "[LDClient] Will retry posting events after 1 second" } - sleep(1) - end - begin - client.start if !client.started? - config.logger.debug { "[LDClient] sending #{events_out.length} events: #{body}" } - uri = URI(config.events_uri + "/bulk") - req = Net::HTTP::Post.new(uri) - req.content_type = "application/json" - req.body = body - req["Authorization"] = sdk_key - req["User-Agent"] = "RubyClient/" + LaunchDarkly::VERSION - req["X-LaunchDarkly-Event-Schema"] = CURRENT_SCHEMA_VERSION.to_s - req["X-LaunchDarkly-Payload-ID"] = payload_id - req["Connection"] = "keep-alive" - res = client.request(req) - rescue StandardError => exn - config.logger.warn { "[LDClient] Error flushing events: #{exn.inspect}." 
} - next - end - status = res.code.to_i - if status < 200 || status >= 300 - if Util.http_error_recoverable?(status) - next - end - end - break - end - # used up our retries, return the last response if any - res - end - end - # @private class EventOutputFormatter def initialize(config) diff --git a/lib/ldclient-rb/flags_state.rb b/lib/ldclient-rb/flags_state.rb index 4efe1404..496ad61b 100644 --- a/lib/ldclient-rb/flags_state.rb +++ b/lib/ldclient-rb/flags_state.rb @@ -22,7 +22,7 @@ def add_flag(flag, value, variation, reason = nil, details_only_if_tracked = fal meta = {} with_details = !details_only_if_tracked || flag[:trackEvents] if !with_details && flag[:debugEventsUntilDate] - with_details = flag[:debugEventsUntilDate] > (Time.now.to_f * 1000).to_i + with_details = flag[:debugEventsUntilDate] > Impl::Util::current_time_millis end if with_details meta[:version] = flag[:version] diff --git a/lib/ldclient-rb/impl/diagnostic_events.rb b/lib/ldclient-rb/impl/diagnostic_events.rb new file mode 100644 index 00000000..4c61a905 --- /dev/null +++ b/lib/ldclient-rb/impl/diagnostic_events.rb @@ -0,0 +1,130 @@ +require "ldclient-rb/impl/util" + +require "rbconfig" +require "securerandom" + +module LaunchDarkly + module Impl + class DiagnosticAccumulator + def self.create_diagnostic_id(sdk_key) + { + diagnosticId: SecureRandom.uuid, + sdkKeySuffix: sdk_key[-6..-1] || sdk_key + } + end + + def initialize(diagnostic_id) + @id = diagnostic_id + @lock = Mutex.new + self.reset(Util.current_time_millis) + end + + def reset(time) + @data_since_date = time + @stream_inits = [] + end + + def create_init_event(config) + return { + kind: 'diagnostic-init', + creationDate: Util.current_time_millis, + id: @id, + configuration: DiagnosticAccumulator.make_config_data(config), + sdk: DiagnosticAccumulator.make_sdk_data(config), + platform: DiagnosticAccumulator.make_platform_data + } + end + + def record_stream_init(timestamp, failed, duration_millis) + @lock.synchronize do + @stream_inits.push({ timestamp: timestamp, failed: failed, durationMillis: duration_millis }) + end + end + + def create_periodic_event_and_reset(dropped_events, deduplicated_users, events_in_last_batch) + previous_stream_inits = @lock.synchronize do + si = @stream_inits + @stream_inits = [] + si + end + + current_time = Util.current_time_millis + event = { + kind: 'diagnostic', + creationDate: current_time, + id: @id, + dataSinceDate: @data_since_date, + droppedEvents: dropped_events, + deduplicatedUsers: deduplicated_users, + eventsInLastBatch: events_in_last_batch, + streamInits: previous_stream_inits + } + @data_since_date = current_time + event + end + + def self.make_config_data(config) + ret = { + allAttributesPrivate: config.all_attributes_private, + connectTimeoutMillis: self.seconds_to_millis(config.connect_timeout), + customBaseURI: config.base_uri != Config.default_base_uri, + customEventsURI: config.events_uri != Config.default_events_uri, + customStreamURI: config.stream_uri != Config.default_stream_uri, + diagnosticRecordingIntervalMillis: self.seconds_to_millis(config.diagnostic_recording_interval), + eventsCapacity: config.capacity, + eventsFlushIntervalMillis: self.seconds_to_millis(config.flush_interval), + inlineUsersInEvents: config.inline_users_in_events, + pollingIntervalMillis: self.seconds_to_millis(config.poll_interval), + socketTimeoutMillis: self.seconds_to_millis(config.read_timeout), + streamingDisabled: !config.stream?, + userKeysCapacity: config.user_keys_capacity, + userKeysFlushIntervalMillis: 
self.seconds_to_millis(config.user_keys_flush_interval), + usingProxy: ENV.has_key?('http_proxy') || ENV.has_key?('https_proxy') || ENV.has_key?('HTTP_PROXY'), + usingRelayDaemon: config.use_ldd?, + } + ret + end + + def self.make_sdk_data(config) + ret = { + name: 'ruby-server-sdk', + version: LaunchDarkly::VERSION + } + if config.wrapper_name + ret[:wrapperName] = config.wrapper_name + ret[:wrapperVersion] = config.wrapper_version + end + ret + end + + def self.make_platform_data + conf = RbConfig::CONFIG + { + name: 'ruby', + osArch: conf['host_cpu'], + osName: self.normalize_os_name(conf['host_os']), + osVersion: 'unknown', # there seems to be no portable way to detect this in Ruby + rubyVersion: conf['ruby_version'], + rubyImplementation: Object.constants.include?(:RUBY_ENGINE) ? RUBY_ENGINE : 'unknown' + } + end + + def self.normalize_os_name(name) + case name + when /linux|arch/i + 'Linux' + when /darwin/i + 'MacOS' + when /mswin|windows/i + 'Windows' + else + name + end + end + + def self.seconds_to_millis(s) + (s * 1000).to_i + end + end + end +end diff --git a/lib/ldclient-rb/impl/event_sender.rb b/lib/ldclient-rb/impl/event_sender.rb new file mode 100644 index 00000000..834cd3a3 --- /dev/null +++ b/lib/ldclient-rb/impl/event_sender.rb @@ -0,0 +1,72 @@ +require "securerandom" + +module LaunchDarkly + module Impl + EventSenderResult = Struct.new(:success, :must_shutdown, :time_from_server) + + class EventSender + CURRENT_SCHEMA_VERSION = 3 + DEFAULT_RETRY_INTERVAL = 1 + + def initialize(sdk_key, config, http_client = nil, retry_interval = DEFAULT_RETRY_INTERVAL) + @client = http_client ? http_client : LaunchDarkly::Util.new_http_client(config.events_uri, config) + @sdk_key = sdk_key + @config = config + @events_uri = config.events_uri + "/bulk" + @diagnostic_uri = config.events_uri + "/diagnostic" + @logger = config.logger + @retry_interval = retry_interval + end + + def send_event_data(event_data, is_diagnostic) + uri = is_diagnostic ? @diagnostic_uri : @events_uri + payload_id = is_diagnostic ? nil : SecureRandom.uuid + description = is_diagnostic ? 'diagnostic event' : "#{event_data.length} events" + res = nil + (0..1).each do |attempt| + if attempt > 0 + @logger.warn { "[LDClient] Will retry posting events after #{@retry_interval} second" } + sleep(@retry_interval) + end + begin + @client.start if !@client.started? + @logger.debug { "[LDClient] sending #{description}: #{body}" } + req = Net::HTTP::Post.new(uri) + req.content_type = "application/json" + req.body = event_data + Impl::Util.default_http_headers(@sdk_key, @config).each { |k, v| req[k] = v } + if !is_diagnostic + req["X-LaunchDarkly-Event-Schema"] = CURRENT_SCHEMA_VERSION.to_s + req["X-LaunchDarkly-Payload-ID"] = payload_id + end + req["Connection"] = "keep-alive" + res = @client.request(req) + rescue StandardError => exn + @logger.warn { "[LDClient] Error sending events: #{exn.inspect}." } + next + end + status = res.code.to_i + if status >= 200 && status < 300 + res_time = nil + if !res["date"].nil? + begin + res_time = Time.httpdate(res["date"]) + rescue ArgumentError + end + end + return EventSenderResult.new(true, false, res_time) + end + must_shutdown = !LaunchDarkly::Util.http_error_recoverable?(status) + can_retry = !must_shutdown && attempt == 0 + message = LaunchDarkly::Util.http_error_message(status, "event delivery", can_retry ? 
"will retry" : "some events were dropped") + @logger.error { "[LDClient] #{message}" } + if must_shutdown + return EventSenderResult.new(false, true, nil) + end + end + # used up our retries + return EventSenderResult.new(false, false, nil) + end + end + end +end diff --git a/lib/ldclient-rb/impl/util.rb b/lib/ldclient-rb/impl/util.rb new file mode 100644 index 00000000..d1197afe --- /dev/null +++ b/lib/ldclient-rb/impl/util.rb @@ -0,0 +1,19 @@ + +module LaunchDarkly + module Impl + module Util + def self.current_time_millis + (Time.now.to_f * 1000).to_i + end + + def self.default_http_headers(sdk_key, config) + ret = { "Authorization" => sdk_key, "User-Agent" => "RubyClient/" + LaunchDarkly::VERSION } + if config.wrapper_name + ret["X-LaunchDarkly-Wrapper"] = config.wrapper_name + + (config.wrapper_version ? "/" + config.wrapper_version : "") + end + ret + end + end + end +end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index b7c2ee85..06db4f00 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -1,3 +1,4 @@ +require "ldclient-rb/impl/diagnostic_events" require "ldclient-rb/impl/event_factory" require "ldclient-rb/impl/store_client_wrapper" require "concurrent/atomics" @@ -46,10 +47,16 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) updated_config.instance_variable_set(:@feature_store, @store) @config = updated_config + if !@config.offline? && @config.send_events && !@config.diagnostic_opt_out? + diagnostic_accumulator = Impl::DiagnosticAccumulator.new(Impl::DiagnosticAccumulator.create_diagnostic_id(sdk_key)) + else + diagnostic_accumulator = nil + end + if @config.offline? || !@config.send_events @event_processor = NullEventProcessor.new else - @event_processor = EventProcessor.new(sdk_key, config) + @event_processor = EventProcessor.new(sdk_key, config, diagnostic_accumulator) end if @config.use_ldd? @@ -59,7 +66,13 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) data_source_or_factory = @config.data_source || self.method(:create_default_data_source) if data_source_or_factory.respond_to? :call - @data_source = data_source_or_factory.call(sdk_key, @config) + # Currently, data source factories take two parameters unless they need to be aware of diagnostic_accumulator, in + # which case they take three parameters. This will be changed in the future to use a less awkware mechanism. + if data_source_or_factory.arity == 3 + @data_source = data_source_or_factory.call(sdk_key, @config, diagnostic_accumulator) + else + @data_source = data_source_or_factory.call(sdk_key, @config) + end else @data_source = data_source_or_factory end @@ -335,13 +348,13 @@ def close private - def create_default_data_source(sdk_key, config) + def create_default_data_source(sdk_key, config, diagnostic_accumulator) if config.offline? return NullUpdateProcessor.new end requestor = Requestor.new(sdk_key, config) if config.stream? - StreamProcessor.new(sdk_key, config, requestor) + StreamProcessor.new(sdk_key, config, requestor, diagnostic_accumulator) else config.logger.info { "Disabling streaming API" } config.logger.warn { "You should only disable the streaming API if instructed to do so by LaunchDarkly support" } diff --git a/lib/ldclient-rb/requestor.rb b/lib/ldclient-rb/requestor.rb index f7174787..eae0a193 100644 --- a/lib/ldclient-rb/requestor.rb +++ b/lib/ldclient-rb/requestor.rb @@ -51,8 +51,7 @@ def make_request(path) @client.start if !@client.started? 
uri = URI(@config.base_uri + path) req = Net::HTTP::Get.new(uri) - req["Authorization"] = @sdk_key - req["User-Agent"] = "RubyClient/" + LaunchDarkly::VERSION + Impl::Util.default_http_headers(@sdk_key, @config).each { |k, v| req[k] = v } req["Connection"] = "keep-alive" cached = @cache.read(uri) if !cached.nil? diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index ddb7f669..e27fad32 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -24,7 +24,7 @@ module LaunchDarkly # @private class StreamProcessor - def initialize(sdk_key, config, requestor) + def initialize(sdk_key, config, requestor, diagnostic_accumulator = nil) @sdk_key = sdk_key @config = config @feature_store = config.feature_store @@ -33,6 +33,7 @@ def initialize(sdk_key, config, requestor) @started = Concurrent::AtomicBoolean.new(false) @stopped = Concurrent::AtomicBoolean.new(false) @ready = Concurrent::Event.new + @connection_attempt_start_time = 0 end def initialized? @@ -44,18 +45,17 @@ def start @config.logger.info { "[LDClient] Initializing stream connection" } - headers = { - 'Authorization' => @sdk_key, - 'User-Agent' => 'RubyClient/' + LaunchDarkly::VERSION - } + headers = Impl::Util.default_http_headers(@sdk_key, @config) opts = { headers: headers, read_timeout: READ_TIMEOUT_SECONDS, logger: @config.logger } + log_connection_started @es = SSE::Client.new(@config.stream_uri + "/all", **opts) do |conn| conn.on_event { |event| process_message(event) } conn.on_error { |err| + log_connection_result(false) case err when SSE::Errors::HTTPStatusError status = err.status @@ -82,6 +82,7 @@ def stop private def process_message(message) + log_connection_result(true) method = message.type @config.logger.debug { "[LDClient] Stream received #{method} message: #{message.data}" } if method == PUT @@ -137,5 +138,17 @@ def process_message(message) def key_for_path(kind, path) path.start_with?(KEY_PATHS[kind]) ? path[KEY_PATHS[kind].length..-1] : nil end + + def log_connection_started + @connection_attempt_start_time = Impl::Util::current_time_millis + end + + def log_connection_result(is_success) + if !@diagnostic_accumulator.nil? 
&& @connection_attempt_start_time > 0 + @diagnostic_accumulator.record_stream_init(@connection_attempt_start_time, !is_success, + Impl::Util::current_time_millis - @connection_attempt_start_time) + @connection_attempt_start_time = 0 + end + end end end diff --git a/spec/diagnostic_events_spec.rb b/spec/diagnostic_events_spec.rb new file mode 100644 index 00000000..0c4ef058 --- /dev/null +++ b/spec/diagnostic_events_spec.rb @@ -0,0 +1,163 @@ +require "ldclient-rb/impl/diagnostic_events" + +require "spec_helper" + +module LaunchDarkly + module Impl + describe DiagnosticAccumulator do + subject { DiagnosticAccumulator } + + let(:sdk_key) { "sdk_key" } + let(:default_id) { subject.create_diagnostic_id("my-key") } + let(:default_acc) { subject.new(default_id) } + + it "creates unique ID with SDK key suffix" do + id1 = subject.create_diagnostic_id("1234567890") + expect(id1[:sdkKeySuffix]).to eq "567890" + expect(id1[:diagnosticId]).not_to be_nil + + id2 = subject.create_diagnostic_id("1234567890") + expect(id2[:diagnosticId]).not_to eq id1[:diagnosticId] + end + + describe "init event" do + def expected_default_config + { + allAttributesPrivate: false, + connectTimeoutMillis: Config.default_connect_timeout * 1000, + customBaseURI: false, + customEventsURI: false, + customStreamURI: false, + diagnosticRecordingIntervalMillis: Config.default_diagnostic_recording_interval * 1000, + eventsCapacity: Config.default_capacity, + eventsFlushIntervalMillis: Config.default_flush_interval * 1000, + inlineUsersInEvents: false, + pollingIntervalMillis: Config.default_poll_interval * 1000, + socketTimeoutMillis: Config.default_read_timeout * 1000, + streamingDisabled: false, + userKeysCapacity: Config.default_user_keys_capacity, + userKeysFlushIntervalMillis: Config.default_user_keys_flush_interval * 1000, + usingProxy: false, + usingRelayDaemon: false + } + end + + it "has basic fields" do + event = default_acc.create_init_event(Config.new) + expect(event[:kind]).to eq 'diagnostic-init' + expect(event[:creationDate]).not_to be_nil + expect(event[:id]).to eq default_id + end + + it "can have default config data" do + event = default_acc.create_init_event(Config.new) + expect(event[:configuration]).to eq expected_default_config + end + + it "can have custom config data" do + changes_and_expected = [ + [ { all_attributes_private: true }, { allAttributesPrivate: true } ], + [ { connect_timeout: 46 }, { connectTimeoutMillis: 46000 } ], + [ { base_uri: 'http://custom' }, { customBaseURI: true } ], + [ { events_uri: 'http://custom' }, { customEventsURI: true } ], + [ { stream_uri: 'http://custom' }, { customStreamURI: true } ], + [ { diagnostic_recording_interval: 9999 }, { diagnosticRecordingIntervalMillis: 9999000 } ], + [ { capacity: 4000 }, { eventsCapacity: 4000 } ], + [ { flush_interval: 46 }, { eventsFlushIntervalMillis: 46000 } ], + [ { inline_users_in_events: true }, { inlineUsersInEvents: true } ], + [ { poll_interval: 999 }, { pollingIntervalMillis: 999000 } ], + [ { read_timeout: 46 }, { socketTimeoutMillis: 46000 } ], + [ { stream: false }, { streamingDisabled: true } ], + [ { user_keys_capacity: 999 }, { userKeysCapacity: 999 } ], + [ { user_keys_flush_interval: 999 }, { userKeysFlushIntervalMillis: 999000 } ], + [ { use_ldd: true }, { usingRelayDaemon: true } ] + ] + changes_and_expected.each do |config_values, expected_values| + config = Config.new(config_values) + event = default_acc.create_init_event(config) + expect(event[:configuration]).to eq expected_default_config.merge(expected_values) + end 
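        # For reference, a minimal sketch of how an application would set the two
        # options exercised here (option names as added to config.rb in this patch;
        # the values below are arbitrary examples, not defaults):
        #
        #   opted_out        = LaunchDarkly::Config.new(diagnostic_opt_out: true)
        #   every_two_minutes = LaunchDarkly::Config.new(diagnostic_recording_interval: 120)
        #
        # Per the constructor above, interval values not above the 60-second minimum
        # fall back to the 900-second default.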
+ end + + it "detects proxy" do + begin + ENV["http_proxy"] = 'http://my-proxy' + event = default_acc.create_init_event(Config.new) + expect(event[:configuration][:usingProxy]).to be true + ensure + ENV["http_proxy"] = nil + end + end + + it "has expected SDK data" do + event = default_acc.create_init_event(Config.new) + expect(event[:sdk]).to eq ({ + name: 'ruby-server-sdk', + version: LaunchDarkly::VERSION + }) + end + + it "has expected SDK data with wrapper" do + event = default_acc.create_init_event(Config.new(wrapper_name: 'my-wrapper', wrapper_version: '2.0')) + expect(event[:sdk]).to eq ({ + name: 'ruby-server-sdk', + version: LaunchDarkly::VERSION, + wrapperName: 'my-wrapper', + wrapperVersion: '2.0' + }) + end + + it "has expected platform data" do + event = default_acc.create_init_event(Config.new) + expect(event[:platform]).to include ({ + name: 'ruby' + }) + end + end + + describe "periodic event" do + it "has correct default values" do + acc = subject.new(default_id) + event = acc.create_periodic_event_and_reset(2, 3, 4) + expect(event).to include({ + kind: 'diagnostic', + id: default_id, + droppedEvents: 2, + deduplicatedUsers: 3, + eventsInLastBatch: 4, + streamInits: [] + }) + expect(event[:creationDate]).not_to be_nil + expect(event[:dataSinceDate]).not_to be_nil + end + + it "can add stream init" do + acc = subject.new(default_id) + acc.record_stream_init(1000, false, 2000) + event = acc.create_periodic_event_and_reset(0, 0, 0) + expect(event[:streamInits]).to eq [{ timestamp: 1000, failed: false, durationMillis: 2000 }] + end + + it "resets fields after creating event" do + acc = subject.new(default_id) + acc.record_stream_init(1000, false, 2000) + event1 = acc.create_periodic_event_and_reset(2, 3, 4) + event2 = acc.create_periodic_event_and_reset(5, 6, 7) + expect(event1).to include ({ + droppedEvents: 2, + deduplicatedUsers: 3, + eventsInLastBatch: 4, + streamInits: [{ timestamp: 1000, failed: false, durationMillis: 2000 }] + }) + expect(event2).to include ({ + dataSinceDate: event1[:creationDate], + droppedEvents: 5, + deduplicatedUsers: 6, + eventsInLastBatch: 7, + streamInits: [] + }) + end + end + end + end +end diff --git a/spec/evaluation_spec.rb b/spec/evaluation_spec.rb index 2efbd745..14d5ed80 100644 --- a/spec/evaluation_spec.rb +++ b/spec/evaluation_spec.rb @@ -17,7 +17,7 @@ } } - let(:logger) { LaunchDarkly::Config.default_logger } + let(:logger) { $null_log } def boolean_flag_with_rules(rules) { key: 'feature', on: true, rules: rules, fallthrough: { variation: 0 }, variations: [ false, true ] } diff --git a/spec/event_sender_spec.rb b/spec/event_sender_spec.rb new file mode 100644 index 00000000..e99761b7 --- /dev/null +++ b/spec/event_sender_spec.rb @@ -0,0 +1,179 @@ +require "ldclient-rb/impl/event_sender" + +require "http_util" +require "spec_helper" + +require "time" + +module LaunchDarkly + module Impl + describe EventSender do + subject { EventSender } + + let(:sdk_key) { "sdk_key" } + let(:fake_data) { '{"things":[]}' } + + def make_sender(server) + subject.new(sdk_key, Config.new(events_uri: server.base_uri.to_s, logger: $null_log), nil, 0.1) + end + + def with_sender_and_server + with_server do |server| + yield make_sender(server), server + end + end + + it "sends analytics event data" do + with_sender_and_server do |es, server| + server.setup_ok_response("/bulk", "") + + result = es.send_event_data(fake_data, false) + + expect(result.success).to be true + expect(result.must_shutdown).to be false + expect(result.time_from_server).not_to be_nil + 
+ req = server.await_request + expect(req.body).to eq fake_data + expect(req.header).to include({ + "authorization" => [ sdk_key ], + "content-type" => [ "application/json" ], + "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ], + "x-launchdarkly-event-schema" => [ "3" ] + }) + expect(req.header['x-launchdarkly-payload-id']).not_to eq [] + end + end + + it "generates a new payload ID for each payload" do + with_sender_and_server do |es, server| + server.setup_ok_response("/bulk", "") + + result1 = es.send_event_data(fake_data, false) + result2 = es.send_event_data(fake_data, false) + expect(result1.success).to be true + expect(result2.success).to be true + + req1, body1 = server.await_request_with_body + req2, body2 = server.await_request_with_body + expect(body1).to eq fake_data + expect(body2).to eq fake_data + expect(req1.header['x-launchdarkly-payload-id']).not_to eq req2.header['x-launchdarkly-payload-id'] + end + end + + it "sends diagnostic event data" do + with_sender_and_server do |es, server| + server.setup_ok_response("/diagnostic", "") + + result = es.send_event_data(fake_data, true) + + expect(result.success).to be true + expect(result.must_shutdown).to be false + expect(result.time_from_server).not_to be_nil + + req, body = server.await_request_with_body + expect(body).to eq fake_data + expect(req.header).to include({ + "authorization" => [ sdk_key ], + "content-type" => [ "application/json" ], + "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ], + }) + expect(req.header['x-launchdarkly-event-schema']).to eq [] + expect(req.header['x-launchdarkly-payload-id']).to eq [] + end + end + + it "can use a proxy server" do + with_server do |server| + server.setup_ok_response("/bulk", "") + + with_server(StubProxyServer.new) do |proxy| + begin + ENV["http_proxy"] = proxy.base_uri.to_s + + es = make_sender(server) + + result = es.send_event_data(fake_data, false) + + expect(result.success).to be true + + req, body = server.await_request_with_body + expect(body).to eq fake_data + ensure + ENV["http_proxy"] = nil + end + end + end + end + + [400, 408, 429, 500].each do |status| + it "handles recoverable error #{status}" do + with_sender_and_server do |es, server| + req_count = 0 + server.setup_response("/bulk") do |req, res| + req_count = req_count + 1 + res.status = req_count == 2 ? 200 : status + end + + result = es.send_event_data(fake_data, false) + + expect(result.success).to be true + expect(result.must_shutdown).to be false + expect(result.time_from_server).not_to be_nil + + expect(server.requests.count).to eq 2 + req1, body1 = server.await_request_with_body + req2, body2 = server.await_request_with_body + expect(body1).to eq fake_data + expect(body2).to eq fake_data + expect(req1.header['x-launchdarkly-payload-id']).to eq req2.header['x-launchdarkly-payload-id'] + end + end + end + + [400, 408, 429, 500].each do |status| + it "only retries error #{status} once" do + with_sender_and_server do |es, server| + req_count = 0 + server.setup_response("/bulk") do |req, res| + req_count = req_count + 1 + res.status = req_count == 3 ? 
200 : status + end + + result = es.send_event_data(fake_data, false) + + expect(result.success).to be false + expect(result.must_shutdown).to be false + expect(result.time_from_server).to be_nil + + expect(server.requests.count).to eq 2 + req1, body1 = server.await_request_with_body + req2, body2 = server.await_request_with_body + expect(body1).to eq fake_data + expect(body2).to eq fake_data + expect(req1.header['x-launchdarkly-payload-id']).to eq req2.header['x-launchdarkly-payload-id'] + end + end + end + + [401, 403].each do |status| + it "gives up after unrecoverable error #{status}" do + with_sender_and_server do |es, server| + server.setup_response("/bulk") do |req, res| + res.status = status + end + + result = es.send_event_data(fake_data, false) + + expect(result.success).to be false + expect(result.must_shutdown).to be true + expect(result.time_from_server).to be_nil + + expect(server.requests.count).to eq 1 + end + end + end + end + end +end diff --git a/spec/events_spec.rb b/spec/events_spec.rb index 1108a3ac..a36fa95f 100644 --- a/spec/events_spec.rb +++ b/spec/events_spec.rb @@ -5,8 +5,8 @@ describe LaunchDarkly::EventProcessor do subject { LaunchDarkly::EventProcessor } - let(:default_config) { LaunchDarkly::Config.new } - let(:hc) { FakeHttpClient.new } + let(:default_config_opts) { { diagnostic_opt_out: true, logger: $null_log } } + let(:default_config) { LaunchDarkly::Config.new(default_config_opts) } let(:user) { { key: "userkey", name: "Red" } } let(:filtered_user) { { key: "userkey", privateAttrs: [ "name" ] } } let(:numeric_user) { { key: 1, secondary: 2, ip: 3, country: 4, email: 5, firstName: 6, lastName: 7, @@ -14,546 +14,508 @@ let(:stringified_numeric_user) { { key: '1', secondary: '2', ip: '3', country: '4', email: '5', firstName: '6', lastName: '7', avatar: '8', name: '9', anonymous: false, custom: { age: 99 } } } - after(:each) do - if !@ep.nil? 
- @ep.stop + def with_processor_and_sender(config) + sender = FakeEventSender.new + ep = subject.new("sdk_key", config, nil, nil, { event_sender: sender }) + begin + yield ep, sender + ensure + ep.stop end end it "queues identify event" do - @ep = subject.new("sdk_key", default_config, hc) - e = { kind: "identify", key: user[:key], user: user } - @ep.add_event(e) + with_processor_and_sender(default_config) do |ep, sender| + e = { kind: "identify", key: user[:key], user: user } + ep.add_event(e) - output = flush_and_get_events - expect(output).to contain_exactly(e) + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly(e) + end end it "filters user in identify event" do - config = LaunchDarkly::Config.new(all_attributes_private: true) - @ep = subject.new("sdk_key", config, hc) - e = { kind: "identify", key: user[:key], user: user } - @ep.add_event(e) - - output = flush_and_get_events - expect(output).to contain_exactly({ - kind: "identify", - key: user[:key], - creationDate: e[:creationDate], - user: filtered_user - }) + config = LaunchDarkly::Config.new(default_config_opts.merge(all_attributes_private: true)) + with_processor_and_sender(config) do |ep, sender| + e = { kind: "identify", key: user[:key], user: user } + ep.add_event(e) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly({ + kind: "identify", + key: user[:key], + creationDate: e[:creationDate], + user: filtered_user + }) + end end it "stringifies built-in user attributes in identify event" do - @ep = subject.new("sdk_key", default_config, hc) - flag = { key: "flagkey", version: 11 } - e = { kind: "identify", key: numeric_user[:key], user: numeric_user } - @ep.add_event(e) - - output = flush_and_get_events - expect(output).to contain_exactly( - kind: "identify", - key: numeric_user[:key].to_s, - creationDate: e[:creationDate], - user: stringified_numeric_user - ) + with_processor_and_sender(default_config) do |ep, sender| + flag = { key: "flagkey", version: 11 } + e = { kind: "identify", key: numeric_user[:key], user: numeric_user } + ep.add_event(e) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + kind: "identify", + key: numeric_user[:key].to_s, + creationDate: e[:creationDate], + user: stringified_numeric_user + ) + end end it "queues individual feature event with index event" do - @ep = subject.new("sdk_key", default_config, hc) - flag = { key: "flagkey", version: 11 } - fe = { - kind: "feature", key: "flagkey", version: 11, user: user, - variation: 1, value: "value", trackEvents: true - } - @ep.add_event(fe) - - output = flush_and_get_events - expect(output).to contain_exactly( - eq(index_event(fe, user)), - eq(feature_event(fe, flag, false, nil)), - include(:kind => "summary") - ) + with_processor_and_sender(default_config) do |ep, sender| + flag = { key: "flagkey", version: 11 } + fe = { + kind: "feature", key: "flagkey", version: 11, user: user, + variation: 1, value: "value", trackEvents: true + } + ep.add_event(fe) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + eq(index_event(fe, user)), + eq(feature_event(fe, flag, false, nil)), + include(:kind => "summary") + ) + end end it "filters user in index event" do - config = LaunchDarkly::Config.new(all_attributes_private: true) - @ep = subject.new("sdk_key", config, hc) - flag = { key: "flagkey", version: 11 } - fe = { - kind: "feature", key: "flagkey", version: 11, user: user, - variation: 1, value: "value", trackEvents: true - } - 
@ep.add_event(fe) - - output = flush_and_get_events - expect(output).to contain_exactly( - eq(index_event(fe, filtered_user)), - eq(feature_event(fe, flag, false, nil)), - include(:kind => "summary") - ) + config = LaunchDarkly::Config.new(default_config_opts.merge(all_attributes_private: true)) + with_processor_and_sender(config) do |ep, sender| + flag = { key: "flagkey", version: 11 } + fe = { + kind: "feature", key: "flagkey", version: 11, user: user, + variation: 1, value: "value", trackEvents: true + } + ep.add_event(fe) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + eq(index_event(fe, filtered_user)), + eq(feature_event(fe, flag, false, nil)), + include(:kind => "summary") + ) + end end it "stringifies built-in user attributes in index event" do - @ep = subject.new("sdk_key", default_config, hc) - flag = { key: "flagkey", version: 11 } - fe = { - kind: "feature", key: "flagkey", version: 11, user: numeric_user, - variation: 1, value: "value", trackEvents: true - } - @ep.add_event(fe) - - output = flush_and_get_events - expect(output).to contain_exactly( - eq(index_event(fe, stringified_numeric_user)), - eq(feature_event(fe, flag, false, nil)), - include(:kind => "summary") - ) + with_processor_and_sender(default_config) do |ep, sender| + flag = { key: "flagkey", version: 11 } + fe = { + kind: "feature", key: "flagkey", version: 11, user: numeric_user, + variation: 1, value: "value", trackEvents: true + } + ep.add_event(fe) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + eq(index_event(fe, stringified_numeric_user)), + eq(feature_event(fe, flag, false, nil)), + include(:kind => "summary") + ) + end end it "can include inline user in feature event" do - config = LaunchDarkly::Config.new(inline_users_in_events: true) - @ep = subject.new("sdk_key", config, hc) - flag = { key: "flagkey", version: 11 } - fe = { - kind: "feature", key: "flagkey", version: 11, user: user, - variation: 1, value: "value", trackEvents: true - } - @ep.add_event(fe) - - output = flush_and_get_events - expect(output).to contain_exactly( - eq(feature_event(fe, flag, false, user)), - include(:kind => "summary") - ) + config = LaunchDarkly::Config.new(default_config_opts.merge(inline_users_in_events: true)) + with_processor_and_sender(config) do |ep, sender| + flag = { key: "flagkey", version: 11 } + fe = { + kind: "feature", key: "flagkey", version: 11, user: user, + variation: 1, value: "value", trackEvents: true + } + ep.add_event(fe) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + eq(feature_event(fe, flag, false, user)), + include(:kind => "summary") + ) + end end it "stringifies built-in user attributes in feature event" do - config = LaunchDarkly::Config.new(inline_users_in_events: true) - @ep = subject.new("sdk_key", config, hc) - flag = { key: "flagkey", version: 11 } - fe = { - kind: "feature", key: "flagkey", version: 11, user: numeric_user, - variation: 1, value: "value", trackEvents: true - } - @ep.add_event(fe) - - output = flush_and_get_events - expect(output).to contain_exactly( - eq(feature_event(fe, flag, false, stringified_numeric_user)), - include(:kind => "summary") - ) + config = LaunchDarkly::Config.new(default_config_opts.merge(inline_users_in_events: true)) + with_processor_and_sender(config) do |ep, sender| + flag = { key: "flagkey", version: 11 } + fe = { + kind: "feature", key: "flagkey", version: 11, user: numeric_user, + variation: 1, value: "value", trackEvents: true + } + 
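        # Note on the expected output in this and the surrounding tests: only the
        # built-in user attributes (key, secondary, ip, country, email, firstName,
        # lastName, avatar, name) are coerced to strings for event payloads, which is
        # why numeric_user's key 1 becomes "1" while the custom attribute age: 99 is
        # left untouched (see USER_ATTRS_TO_STRINGIFY_FOR_EVENTS in events.rb).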
ep.add_event(fe) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + eq(feature_event(fe, flag, false, stringified_numeric_user)), + include(:kind => "summary") + ) + end end it "filters user in feature event" do - config = LaunchDarkly::Config.new(all_attributes_private: true, inline_users_in_events: true) - @ep = subject.new("sdk_key", config, hc) - flag = { key: "flagkey", version: 11 } - fe = { - kind: "feature", key: "flagkey", version: 11, user: user, - variation: 1, value: "value", trackEvents: true - } - @ep.add_event(fe) - - output = flush_and_get_events - expect(output).to contain_exactly( - eq(feature_event(fe, flag, false, filtered_user)), - include(:kind => "summary") - ) + config = LaunchDarkly::Config.new(default_config_opts.merge(all_attributes_private: true, inline_users_in_events: true)) + with_processor_and_sender(config) do |ep, sender| + flag = { key: "flagkey", version: 11 } + fe = { + kind: "feature", key: "flagkey", version: 11, user: user, + variation: 1, value: "value", trackEvents: true + } + ep.add_event(fe) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + eq(feature_event(fe, flag, false, filtered_user)), + include(:kind => "summary") + ) + end end it "still generates index event if inline_users is true but feature event was not tracked" do - config = LaunchDarkly::Config.new(inline_users_in_events: true) - @ep = subject.new("sdk_key", config, hc) - flag = { key: "flagkey", version: 11 } - fe = { - kind: "feature", key: "flagkey", version: 11, user: user, - variation: 1, value: "value", trackEvents: false - } - @ep.add_event(fe) - - output = flush_and_get_events - expect(output).to contain_exactly( - eq(index_event(fe, user)), - include(:kind => "summary") - ) + config = LaunchDarkly::Config.new(default_config_opts.merge(inline_users_in_events: true)) + with_processor_and_sender(config) do |ep, sender| + flag = { key: "flagkey", version: 11 } + fe = { + kind: "feature", key: "flagkey", version: 11, user: user, + variation: 1, value: "value", trackEvents: false + } + ep.add_event(fe) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + eq(index_event(fe, user)), + include(:kind => "summary") + ) + end end it "sets event kind to debug if flag is temporarily in debug mode" do - @ep = subject.new("sdk_key", default_config, hc) - flag = { key: "flagkey", version: 11 } - future_time = (Time.now.to_f * 1000).to_i + 1000000 - fe = { - kind: "feature", key: "flagkey", version: 11, user: user, - variation: 1, value: "value", trackEvents: false, debugEventsUntilDate: future_time - } - @ep.add_event(fe) - - output = flush_and_get_events - expect(output).to contain_exactly( - eq(index_event(fe, user)), - eq(feature_event(fe, flag, true, user)), - include(:kind => "summary") - ) + with_processor_and_sender(default_config) do |ep, sender| + flag = { key: "flagkey", version: 11 } + future_time = (Time.now.to_f * 1000).to_i + 1000000 + fe = { + kind: "feature", key: "flagkey", version: 11, user: user, + variation: 1, value: "value", trackEvents: false, debugEventsUntilDate: future_time + } + ep.add_event(fe) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + eq(index_event(fe, user)), + eq(feature_event(fe, flag, true, user)), + include(:kind => "summary") + ) + end end it "can be both debugging and tracking an event" do - @ep = subject.new("sdk_key", default_config, hc) - flag = { key: "flagkey", version: 11 } - future_time = (Time.now.to_f * 
1000).to_i + 1000000 - fe = { - kind: "feature", key: "flagkey", version: 11, user: user, - variation: 1, value: "value", trackEvents: true, debugEventsUntilDate: future_time - } - @ep.add_event(fe) - - output = flush_and_get_events - expect(output).to contain_exactly( - eq(index_event(fe, user)), - eq(feature_event(fe, flag, false, nil)), - eq(feature_event(fe, flag, true, user)), - include(:kind => "summary") - ) + with_processor_and_sender(default_config) do |ep, sender| + flag = { key: "flagkey", version: 11 } + future_time = (Time.now.to_f * 1000).to_i + 1000000 + fe = { + kind: "feature", key: "flagkey", version: 11, user: user, + variation: 1, value: "value", trackEvents: true, debugEventsUntilDate: future_time + } + ep.add_event(fe) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + eq(index_event(fe, user)), + eq(feature_event(fe, flag, false, nil)), + eq(feature_event(fe, flag, true, user)), + include(:kind => "summary") + ) + end end it "ends debug mode based on client time if client time is later than server time" do - @ep = subject.new("sdk_key", default_config, hc) - - # Pick a server time that is somewhat behind the client time - server_time = (Time.now.to_f * 1000).to_i - 20000 - - # Send and flush an event we don't care about, just to set the last server time - hc.set_server_time(server_time) - @ep.add_event({ kind: "identify", user: { key: "otherUser" }}) - flush_and_get_events - - # Now send an event with debug mode on, with a "debug until" time that is further in - # the future than the server time, but in the past compared to the client. - flag = { key: "flagkey", version: 11 } - debug_until = server_time + 1000 - fe = { - kind: "feature", key: "flagkey", version: 11, user: user, - variation: 1, value: "value", trackEvents: false, debugEventsUntilDate: debug_until - } - @ep.add_event(fe) - - # Should get a summary event only, not a full feature event - output = flush_and_get_events - expect(output).to contain_exactly( - eq(index_event(fe, user)), - include(:kind => "summary") - ) + with_processor_and_sender(default_config) do |ep, sender| + # Pick a server time that is somewhat behind the client time + server_time = Time.now - 20 + + # Send and flush an event we don't care about, just to set the last server time + sender.result = LaunchDarkly::Impl::EventSenderResult.new(true, false, server_time) + ep.add_event({ kind: "identify", user: user }) + flush_and_get_events(ep, sender) + + # Now send an event with debug mode on, with a "debug until" time that is further in + # the future than the server time, but in the past compared to the client. 
+ flag = { key: "flagkey", version: 11 } + debug_until = (server_time.to_f * 1000).to_i + 1000 + fe = { + kind: "feature", key: "flagkey", version: 11, user: user, + variation: 1, value: "value", trackEvents: false, debugEventsUntilDate: debug_until + } + ep.add_event(fe) + + # Should get a summary event only, not a full feature event + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + include(:kind => "summary") + ) + end end it "ends debug mode based on server time if server time is later than client time" do - @ep = subject.new("sdk_key", default_config, hc) - - # Pick a server time that is somewhat ahead of the client time - server_time = (Time.now.to_f * 1000).to_i + 20000 - - # Send and flush an event we don't care about, just to set the last server time - hc.set_server_time(server_time) - @ep.add_event({ kind: "identify", user: { key: "otherUser" }}) - flush_and_get_events - - # Now send an event with debug mode on, with a "debug until" time that is further in - # the future than the server time, but in the past compared to the client. - flag = { key: "flagkey", version: 11 } - debug_until = server_time - 1000 - fe = { - kind: "feature", key: "flagkey", version: 11, user: user, - variation: 1, value: "value", trackEvents: false, debugEventsUntilDate: debug_until - } - @ep.add_event(fe) - - # Should get a summary event only, not a full feature event - output = flush_and_get_events - expect(output).to contain_exactly( - eq(index_event(fe, user)), - include(:kind => "summary") - ) + with_processor_and_sender(default_config) do |ep, sender| + # Pick a server time that is somewhat ahead of the client time + server_time = Time.now + 20 + + # Send and flush an event we don't care about, just to set the last server time + sender.result = LaunchDarkly::Impl::EventSenderResult.new(true, false, server_time) + ep.add_event({ kind: "identify", user: user }) + flush_and_get_events(ep, sender) + + # Now send an event with debug mode on, with a "debug until" time that is further in + # the future than the server time, but in the past compared to the client. 
+ flag = { key: "flagkey", version: 11 } + debug_until = (server_time.to_f * 1000).to_i - 1000 + fe = { + kind: "feature", key: "flagkey", version: 11, user: user, + variation: 1, value: "value", trackEvents: false, debugEventsUntilDate: debug_until + } + ep.add_event(fe) + + # Should get a summary event only, not a full feature event + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + include(:kind => "summary") + ) + end end it "generates only one index event for multiple events with same user" do - @ep = subject.new("sdk_key", default_config, hc) - flag1 = { key: "flagkey1", version: 11 } - flag2 = { key: "flagkey2", version: 22 } - future_time = (Time.now.to_f * 1000).to_i + 1000000 - fe1 = { - kind: "feature", key: "flagkey1", version: 11, user: user, - variation: 1, value: "value", trackEvents: true - } - fe2 = { - kind: "feature", key: "flagkey2", version: 22, user: user, - variation: 1, value: "value", trackEvents: true - } - @ep.add_event(fe1) - @ep.add_event(fe2) - - output = flush_and_get_events - expect(output).to contain_exactly( - eq(index_event(fe1, user)), - eq(feature_event(fe1, flag1, false, nil)), - eq(feature_event(fe2, flag2, false, nil)), - include(:kind => "summary") - ) + with_processor_and_sender(default_config) do |ep, sender| + flag1 = { key: "flagkey1", version: 11 } + flag2 = { key: "flagkey2", version: 22 } + future_time = (Time.now.to_f * 1000).to_i + 1000000 + fe1 = { + kind: "feature", key: "flagkey1", version: 11, user: user, + variation: 1, value: "value", trackEvents: true + } + fe2 = { + kind: "feature", key: "flagkey2", version: 22, user: user, + variation: 1, value: "value", trackEvents: true + } + ep.add_event(fe1) + ep.add_event(fe2) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + eq(index_event(fe1, user)), + eq(feature_event(fe1, flag1, false, nil)), + eq(feature_event(fe2, flag2, false, nil)), + include(:kind => "summary") + ) + end end it "summarizes non-tracked events" do - @ep = subject.new("sdk_key", default_config, hc) - flag1 = { key: "flagkey1", version: 11 } - flag2 = { key: "flagkey2", version: 22 } - future_time = (Time.now.to_f * 1000).to_i + 1000000 - fe1 = { - kind: "feature", key: "flagkey1", version: 11, user: user, - variation: 1, value: "value1", default: "default1" - } - fe2 = { - kind: "feature", key: "flagkey2", version: 22, user: user, - variation: 2, value: "value2", default: "default2" - } - @ep.add_event(fe1) - @ep.add_event(fe2) - - output = flush_and_get_events - expect(output).to contain_exactly( - eq(index_event(fe1, user)), - eq({ - kind: "summary", - startDate: fe1[:creationDate], - endDate: fe2[:creationDate], - features: { - flagkey1: { - default: "default1", - counters: [ - { version: 11, variation: 1, value: "value1", count: 1 } - ] - }, - flagkey2: { - default: "default2", - counters: [ - { version: 22, variation: 2, value: "value2", count: 1 } - ] + with_processor_and_sender(default_config) do |ep, sender| + flag1 = { key: "flagkey1", version: 11 } + flag2 = { key: "flagkey2", version: 22 } + future_time = (Time.now.to_f * 1000).to_i + 1000000 + fe1 = { + kind: "feature", key: "flagkey1", version: 11, user: user, + variation: 1, value: "value1", default: "default1" + } + fe2 = { + kind: "feature", key: "flagkey2", version: 22, user: user, + variation: 2, value: "value2", default: "default2" + } + ep.add_event(fe1) + ep.add_event(fe2) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + eq(index_event(fe1, user)), 
+ eq({ + kind: "summary", + startDate: fe1[:creationDate], + endDate: fe2[:creationDate], + features: { + flagkey1: { + default: "default1", + counters: [ + { version: 11, variation: 1, value: "value1", count: 1 } + ] + }, + flagkey2: { + default: "default2", + counters: [ + { version: 22, variation: 2, value: "value2", count: 1 } + ] + } } - } - }) - ) + }) + ) + end end it "queues custom event with user" do - @ep = subject.new("sdk_key", default_config, hc) - e = { kind: "custom", key: "eventkey", user: user, data: { thing: "stuff" }, metricValue: 1.5 } - @ep.add_event(e) - - output = flush_and_get_events - expect(output).to contain_exactly( - eq(index_event(e, user)), - eq(custom_event(e, nil)) - ) + with_processor_and_sender(default_config) do |ep, sender| + e = { kind: "custom", key: "eventkey", user: user, data: { thing: "stuff" }, metricValue: 1.5 } + ep.add_event(e) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + eq(index_event(e, user)), + eq(custom_event(e, nil)) + ) + end end it "can include inline user in custom event" do - config = LaunchDarkly::Config.new(inline_users_in_events: true) - @ep = subject.new("sdk_key", config, hc) - e = { kind: "custom", key: "eventkey", user: user, data: { thing: "stuff" } } - @ep.add_event(e) - - output = flush_and_get_events - expect(output).to contain_exactly( - eq(custom_event(e, user)) - ) + config = LaunchDarkly::Config.new(default_config_opts.merge(inline_users_in_events: true)) + with_processor_and_sender(config) do |ep, sender| + e = { kind: "custom", key: "eventkey", user: user, data: { thing: "stuff" } } + ep.add_event(e) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + eq(custom_event(e, user)) + ) + end end it "filters user in custom event" do - config = LaunchDarkly::Config.new(all_attributes_private: true, inline_users_in_events: true) - @ep = subject.new("sdk_key", config, hc) - e = { kind: "custom", key: "eventkey", user: user, data: { thing: "stuff" } } - @ep.add_event(e) - - output = flush_and_get_events - expect(output).to contain_exactly( - eq(custom_event(e, filtered_user)) - ) + config = LaunchDarkly::Config.new(default_config_opts.merge(all_attributes_private: true, inline_users_in_events: true)) + with_processor_and_sender(config) do |ep, sender| + e = { kind: "custom", key: "eventkey", user: user, data: { thing: "stuff" } } + ep.add_event(e) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + eq(custom_event(e, filtered_user)) + ) + end end it "stringifies built-in user attributes in custom event" do - config = LaunchDarkly::Config.new(inline_users_in_events: true) - @ep = subject.new("sdk_key", config, hc) - e = { kind: "custom", key: "eventkey", user: numeric_user } - @ep.add_event(e) - - output = flush_and_get_events - expect(output).to contain_exactly( - eq(custom_event(e, stringified_numeric_user)) - ) + config = LaunchDarkly::Config.new(default_config_opts.merge(inline_users_in_events: true)) + with_processor_and_sender(config) do |ep, sender| + e = { kind: "custom", key: "eventkey", user: numeric_user } + ep.add_event(e) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly( + eq(custom_event(e, stringified_numeric_user)) + ) + end end it "does a final flush when shutting down" do - @ep = subject.new("sdk_key", default_config, hc) - e = { kind: "identify", key: user[:key], user: user } - @ep.add_event(e) - - @ep.stop - - output = get_events_from_last_request - expect(output).to 
contain_exactly(e) + with_processor_and_sender(default_config) do |ep, sender| + e = { kind: "identify", key: user[:key], user: user } + ep.add_event(e) + + ep.stop + + output = sender.analytics_payloads.pop + expect(output).to contain_exactly(e) + end end it "sends nothing if there are no events" do - @ep = subject.new("sdk_key", default_config, hc) - @ep.flush - expect(hc.get_request).to be nil - end - - it "sends SDK key" do - @ep = subject.new("sdk_key", default_config, hc) - e = { kind: "identify", user: user } - @ep.add_event(e) - - @ep.flush - @ep.wait_until_inactive - - expect(hc.get_request["authorization"]).to eq "sdk_key" - end - - it "sends unique payload IDs" do - @ep = subject.new("sdk_key", default_config, hc) - e = { kind: "identify", user: user } - - @ep.add_event(e) - @ep.flush - @ep.wait_until_inactive - req0 = hc.get_request - - @ep.add_event(e) - @ep.flush - @ep.wait_until_inactive - req1 = hc.get_request - - id0 = req0["x-launchdarkly-payload-id"] - id1 = req1["x-launchdarkly-payload-id"] - expect(id0).not_to be_nil - expect(id0).not_to eq "" - expect(id1).not_to be nil - expect(id1).not_to eq "" - expect(id1).not_to eq id0 - end - - def verify_unrecoverable_http_error(status) - @ep = subject.new("sdk_key", default_config, hc) - e = { kind: "identify", user: user } - @ep.add_event(e) - - hc.set_response_status(status) - @ep.flush - @ep.wait_until_inactive - expect(hc.get_request).not_to be_nil - hc.reset - - @ep.add_event(e) - @ep.flush - @ep.wait_until_inactive - expect(hc.get_request).to be_nil - end - - def verify_recoverable_http_error(status) - @ep = subject.new("sdk_key", default_config, hc) - e = { kind: "identify", user: user } - @ep.add_event(e) - - hc.set_response_status(503) - @ep.flush - @ep.wait_until_inactive - - req0 = hc.get_request - expect(req0).not_to be_nil - req1 = hc.get_request - expect(req1).not_to be_nil - id0 = req0["x-launchdarkly-payload-id"] - expect(id0).not_to be_nil - expect(id0).not_to eq "" - expect(req1["x-launchdarkly-payload-id"]).to eq id0 - - expect(hc.get_request).to be_nil # no 3rd request - - # now verify that a subsequent flush still generates a request - hc.reset - @ep.add_event(e) - @ep.flush - @ep.wait_until_inactive - expect(hc.get_request).not_to be_nil + with_processor_and_sender(default_config) do |ep, sender| + ep.flush + ep.wait_until_inactive + expect(sender.analytics_payloads.empty?).to be true + end end - it "stops posting events after getting a 401 error" do - verify_unrecoverable_http_error(401) - end + it "stops posting events after unrecoverable error" do + with_processor_and_sender(default_config) do |ep, sender| + sender.result = LaunchDarkly::Impl::EventSenderResult.new(false, true, nil) + e = { kind: "identify", key: user[:key], user: user } + ep.add_event(e) + flush_and_get_events(ep, sender) - it "stops posting events after getting a 403 error" do - verify_unrecoverable_http_error(403) + e = { kind: "identify", key: user[:key], user: user } + ep.add_event(e) + ep.flush + ep.wait_until_inactive + expect(sender.analytics_payloads.empty?).to be true + end end - it "retries after 408 error" do - verify_recoverable_http_error(408) - end + describe "diagnostic events" do + let(:default_id) { LaunchDarkly::Impl::DiagnosticAccumulator.create_diagnostic_id('sdk_key') } + let(:diagnostic_config) { LaunchDarkly::Config.new(diagnostic_opt_out: false, logger: $null_log) } - it "retries after 429 error" do - verify_recoverable_http_error(429) - end + def with_diagnostic_processor_and_sender(config) + sender = 
FakeEventSender.new + acc = LaunchDarkly::Impl::DiagnosticAccumulator.new(default_id) + ep = subject.new("sdk_key", config, nil, acc, + { diagnostic_recording_interval: 0.2, event_sender: sender }) + begin + yield ep, sender + ensure + ep.stop + end + end - it "retries after 503 error" do - verify_recoverable_http_error(503) - end + it "sends init event" do + with_diagnostic_processor_and_sender(diagnostic_config) do |ep, sender| + event = sender.diagnostic_payloads.pop + expect(event).to include({ + kind: 'diagnostic-init', + id: default_id + }) + end + end - it "retries flush once after connection error" do - @ep = subject.new("sdk_key", default_config, hc) - e = { kind: "identify", user: user } - @ep.add_event(e) + it "sends periodic event" do + with_diagnostic_processor_and_sender(diagnostic_config) do |ep, sender| + init_event = sender.diagnostic_payloads.pop + periodic_event = sender.diagnostic_payloads.pop + expect(periodic_event).to include({ + kind: 'diagnostic', + id: default_id, + droppedEvents: 0, + deduplicatedUsers: 0, + eventsInLastBatch: 0, + streamInits: [] + }) + end + end - hc.set_exception(IOError.new("deliberate error")) - @ep.flush - @ep.wait_until_inactive + it "counts events in queue from last flush and dropped events" do + config = LaunchDarkly::Config.new(diagnostic_opt_out: false, capacity: 2, logger: $null_log) + with_diagnostic_processor_and_sender(config) do |ep, sender| + init_event = sender.diagnostic_payloads.pop + + ep.add_event({ kind: 'identify', user: user }) + ep.add_event({ kind: 'identify', user: user }) + ep.add_event({ kind: 'identify', user: user }) + flush_and_get_events(ep, sender) + + periodic_event = sender.diagnostic_payloads.pop + expect(periodic_event).to include({ + kind: 'diagnostic', + droppedEvents: 1, + eventsInLastBatch: 2 + }) + end + end - expect(hc.get_request).not_to be_nil - expect(hc.get_request).not_to be_nil - expect(hc.get_request).to be_nil # no 3rd request - end + it "counts deduplicated users" do + with_diagnostic_processor_and_sender(diagnostic_config) do |ep, sender| + init_event = sender.diagnostic_payloads.pop - it "makes actual HTTP request with correct headers" do - e = { kind: "identify", key: user[:key], user: user } - with_server do |server| - server.setup_ok_response("/bulk", "") - - @ep = subject.new("sdk_key", LaunchDarkly::Config.new(events_uri: server.base_uri.to_s)) - @ep.add_event(e) - @ep.flush - - req = server.await_request - expect(req.header).to include({ - "authorization" => [ "sdk_key" ], - "content-type" => [ "application/json" ], - "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ], - "x-launchdarkly-event-schema" => [ "3" ] - }) - end - end + ep.add_event({ kind: 'custom', key: 'event1', user: user }) + ep.add_event({ kind: 'custom', key: 'event2', user: user }) + events = flush_and_get_events(ep, sender) - it "can use a proxy server" do - e = { kind: "identify", key: user[:key], user: user } - with_server do |server| - server.setup_ok_response("/bulk", "") - - with_server(StubProxyServer.new) do |proxy| - begin - ENV["http_proxy"] = proxy.base_uri.to_s - @ep = subject.new("sdk_key", LaunchDarkly::Config.new(events_uri: server.base_uri.to_s)) - @ep.add_event(e) - @ep.flush - - req = server.await_request - expect(req["content-type"]).to eq("application/json") - ensure - ENV["http_proxy"] = nil - end + periodic_event = sender.diagnostic_payloads.pop + expect(periodic_event).to include({ + kind: 'diagnostic', + deduplicatedUsers: 1 + }) end end end @@ -599,75 +561,26 @@ def custom_event(e, 
inline_user) out end - def flush_and_get_events - @ep.flush - @ep.wait_until_inactive - get_events_from_last_request + def flush_and_get_events(ep, sender) + ep.flush + ep.wait_until_inactive + sender.analytics_payloads.pop end - def get_events_from_last_request - req = hc.get_request - JSON.parse(req.body, symbolize_names: true) - end + class FakeEventSender + attr_accessor :result + attr_reader :analytics_payloads + attr_reader :diagnostic_payloads - class FakeHttpClient def initialize - reset - end - - def set_response_status(status) - @status = status - end - - def set_server_time(time_millis) - @server_time = Time.at(time_millis.to_f / 1000) - end - - def set_exception(e) - @exception = e - end - - def reset - @requests = [] - @status = 200 - end - - def request(req) - @requests.push(req) - if @exception - raise @exception - else - headers = {} - if @server_time - headers["Date"] = @server_time.httpdate - end - FakeResponse.new(@status ? @status : 200, headers) - end + @result = LaunchDarkly::Impl::EventSenderResult.new(true, false, nil) + @analytics_payloads = Queue.new + @diagnostic_payloads = Queue.new end - def start - end - - def started? - false - end - - def finish - end - - def get_request - @requests.shift - end - end - - class FakeResponse - include Net::HTTPHeader - - attr_reader :code - - def initialize(status, headers) - @code = status.to_s - initialize_http_header(headers) + def send_event_data(data, is_diagnostic) + (is_diagnostic ? @diagnostic_payloads : @analytics_payloads).push(JSON.parse(data, symbolize_names: true)) + @result end end end diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index 837b775d..212d057b 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -95,7 +95,7 @@ def []=(key, value) let(:bad_file_path) { "no-such-file" } before do - @config = LaunchDarkly::Config.new + @config = LaunchDarkly::Config.new(logger: $null_log) @store = @config.feature_store @tmp_dir = Dir.mktmpdir end diff --git a/spec/http_util.rb b/spec/http_util.rb index e43e2ded..27032589 100644 --- a/spec/http_util.rb +++ b/spec/http_util.rb @@ -52,6 +52,13 @@ def setup_response(uri_path, &action) @server.mount_proc(uri_path, action) end + def setup_status_response(uri_path, status, headers={}) + setup_response(uri_path) do |req, res| + res.status = status + headers.each { |n, v| res[n] = v } + end + end + def setup_ok_response(uri_path, body, content_type=nil, headers={}) setup_response(uri_path) do |req, res| res.status = 200 @@ -63,11 +70,17 @@ def setup_ok_response(uri_path, body, content_type=nil, headers={}) def record_request(req, res) @requests.push(req) - @requests_queue << req + @requests_queue << [req, req.body] end def await_request - @requests_queue.pop + r = @requests_queue.pop + r[0] + end + + def await_request_with_body + r = @requests_queue.pop + return r[0], r[1] end end diff --git a/spec/integrations/consul_feature_store_spec.rb b/spec/integrations/consul_feature_store_spec.rb index e74d0f0d..bad1e736 100644 --- a/spec/integrations/consul_feature_store_spec.rb +++ b/spec/integrations/consul_feature_store_spec.rb @@ -4,8 +4,6 @@ $my_prefix = 'testprefix' -$null_log = ::Logger.new($stdout) -$null_log.level = ::Logger::FATAL $consul_base_opts = { prefix: $my_prefix, diff --git a/spec/integrations/dynamodb_feature_store_spec.rb b/spec/integrations/dynamodb_feature_store_spec.rb index 7734670e..3b95edc8 100644 --- a/spec/integrations/dynamodb_feature_store_spec.rb +++ 
b/spec/integrations/dynamodb_feature_store_spec.rb @@ -6,8 +6,6 @@ $table_name = 'LD_DYNAMODB_TEST_TABLE' $endpoint = 'http://localhost:8000' $my_prefix = 'testprefix' -$null_log = ::Logger.new($stdout) -$null_log.level = ::Logger::FATAL $dynamodb_opts = { credentials: Aws::Credentials.new("key", "secret"), diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 4672a662..1d3bb506 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -461,7 +461,7 @@ def event_processor end describe 'with send_events: true' do - let(:config_with_events) { LaunchDarkly::Config.new({offline: false, send_events: true, data_source: null_data}) } + let(:config_with_events) { LaunchDarkly::Config.new({offline: false, send_events: true, diagnostic_opt_out: true, data_source: null_data}) } let(:client_with_events) { subject.new("secret", config_with_events) } it "does not use a NullEventProcessor" do diff --git a/spec/polling_spec.rb b/spec/polling_spec.rb index 690147d0..b0eb46c5 100644 --- a/spec/polling_spec.rb +++ b/spec/polling_spec.rb @@ -6,7 +6,7 @@ let(:requestor) { double() } def with_processor(store) - config = LaunchDarkly::Config.new(feature_store: store) + config = LaunchDarkly::Config.new(feature_store: store, logger: $null_log) processor = subject.new(config, requestor) begin yield processor diff --git a/spec/redis_feature_store_spec.rb b/spec/redis_feature_store_spec.rb index 5aec6658..cf69f334 100644 --- a/spec/redis_feature_store_spec.rb +++ b/spec/redis_feature_store_spec.rb @@ -4,10 +4,7 @@ require "spec_helper" - $my_prefix = 'testprefix' -$null_log = ::Logger.new($stdout) -$null_log.level = ::Logger::FATAL $base_opts = { prefix: $my_prefix, diff --git a/spec/requestor_spec.rb b/spec/requestor_spec.rb index 502f6d86..6833ea1f 100644 --- a/spec/requestor_spec.rb +++ b/spec/requestor_spec.rb @@ -4,10 +4,13 @@ $sdk_key = "secret" describe LaunchDarkly::Requestor do - def with_requestor(base_uri) - r = LaunchDarkly::Requestor.new($sdk_key, LaunchDarkly::Config.new(base_uri: base_uri)) - yield r - r.stop + def with_requestor(base_uri, opts = {}) + r = LaunchDarkly::Requestor.new($sdk_key, LaunchDarkly::Config.new({ base_uri: base_uri }.merge(opts))) + begin + yield r + ensure + r.stop + end end describe "request_all_flags" do @@ -56,6 +59,19 @@ def with_requestor(base_uri) end end + it "sends wrapper header if configured" do + with_server do |server| + with_requestor(server.base_uri.to_s, { wrapper_name: 'MyWrapper', wrapper_version: '1.0' }) do |requestor| + server.setup_ok_response("/", "{}") + requestor.request_all_data() + expect(server.requests.count).to eq 1 + expect(server.requests[0].header).to include({ + "x-launchdarkly-wrapper" => [ "MyWrapper/1.0" ] + }) + end + end + end + it "can reuse cached data" do etag = "xyz" expected_data = { flags: { x: { key: "x" } } } diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index cc5e312b..52926ac1 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -3,6 +3,9 @@ require "ldclient-rb" +$null_log = ::Logger.new($stdout) +$null_log.level = ::Logger::FATAL + RSpec.configure do |config| config.before(:each) do end From ddfbd17bbbbe04b5c3d4968f291948f35a89b430 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 17 Mar 2020 18:28:07 -0700 Subject: [PATCH 153/182] update ruby-eventsource to 1.0.3 for backoff bug --- Gemfile.lock | 4 ++-- launchdarkly-server-sdk.gemspec | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index 0087dad9..91f72a95 100644 --- a/Gemfile.lock +++ 
b/Gemfile.lock @@ -4,7 +4,7 @@ PATH launchdarkly-server-sdk (5.7.0) concurrent-ruby (~> 1.0) json (>= 1.8, < 3) - ld-eventsource (= 1.0.2) + ld-eventsource (= 1.0.3) semantic (~> 1.6) GEM @@ -40,7 +40,7 @@ GEM jmespath (1.4.0) json (1.8.6) json (1.8.6-java) - ld-eventsource (1.0.2) + ld-eventsource (1.0.3) concurrent-ruby (~> 1.0) http_tools (~> 0.4.5) socketry (~> 0.5.1) diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index f69c74fa..237474ef 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -35,5 +35,5 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "json", [">= 1.8", "< 3"] spec.add_runtime_dependency "semantic", "~> 1.6" spec.add_runtime_dependency "concurrent-ruby", "~> 1.0" - spec.add_runtime_dependency "ld-eventsource", "1.0.2" + spec.add_runtime_dependency "ld-eventsource", "1.0.3" end From 1fe77bb2da953768b251f3eced173d1689a89c32 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 27 Mar 2020 12:01:10 -0700 Subject: [PATCH 154/182] fix incorrect initialization of EventProcessor --- lib/ldclient-rb/events.rb | 4 +- lib/ldclient-rb/impl/event_sender.rb | 5 +- lib/ldclient-rb/ldclient.rb | 2 +- spec/event_sender_spec.rb | 16 ++-- spec/events_spec.rb | 2 +- spec/ldclient_end_to_end_spec.rb | 123 +++++++++++++++++++++++++++ 6 files changed, 137 insertions(+), 15 deletions(-) create mode 100644 spec/ldclient_end_to_end_spec.rb diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index 9313b670..0b65f3d5 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -319,7 +319,7 @@ def trigger_flush(outbox, flush_workers) success = flush_workers.post do begin events_out = @formatter.make_output_events(payload.events, payload.summary) - result = @event_sender.send_event_data(events_out.to_json, false) + result = @event_sender.send_event_data(events_out.to_json, "#{events_out.length} events", false) @disabled.value = true if result.must_shutdown if !result.time_from_server.nil? @last_known_past_time.value = (result.time_from_server.to_f * 1000).to_i @@ -348,7 +348,7 @@ def send_diagnostic_event(event, diagnostic_event_workers) uri = URI(@config.events_uri + "/diagnostic") diagnostic_event_workers.post do begin - @event_sender.send_event_data(event.to_json, true) + @event_sender.send_event_data(event.to_json, "diagnostic event", true) rescue => e Util.log_exception(@config.logger, "Unexpected error in event processor", e) end diff --git a/lib/ldclient-rb/impl/event_sender.rb b/lib/ldclient-rb/impl/event_sender.rb index 834cd3a3..f6da0843 100644 --- a/lib/ldclient-rb/impl/event_sender.rb +++ b/lib/ldclient-rb/impl/event_sender.rb @@ -18,10 +18,9 @@ def initialize(sdk_key, config, http_client = nil, retry_interval = DEFAULT_RETR @retry_interval = retry_interval end - def send_event_data(event_data, is_diagnostic) + def send_event_data(event_data, description, is_diagnostic) uri = is_diagnostic ? @diagnostic_uri : @events_uri payload_id = is_diagnostic ? nil : SecureRandom.uuid - description = is_diagnostic ? 'diagnostic event' : "#{event_data.length} events" res = nil (0..1).each do |attempt| if attempt > 0 @@ -30,7 +29,7 @@ def send_event_data(event_data, is_diagnostic) end begin @client.start if !@client.started? 
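For context on the incorrect initialization that PATCH 154 fixes: EventProcessor's constructor takes positional arguments, initialize(sdk_key, config, client = nil, diagnostic_accumulator = nil, test_properties = nil) as shown in a later hunk, so the old call site in ldclient.rb was handing the diagnostic accumulator to the client slot and leaving diagnostic_accumulator nil. A schematic before/after of that call, with comments added here for illustration:

    # Before: the accumulator is passed third, where the optional HTTP client
    # belongs, so EventProcessor never receives a diagnostic accumulator.
    EventProcessor.new(sdk_key, config, diagnostic_accumulator)

    # After: the client slot is filled with an explicit nil, so the accumulator
    # reaches the parameter intended for it.
    EventProcessor.new(sdk_key, config, nil, diagnostic_accumulator)
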
- @logger.debug { "[LDClient] sending #{description}: #{body}" } + @logger.debug { "[LDClient] sending #{description}: #{event_data}" } req = Net::HTTP::Post.new(uri) req.content_type = "application/json" req.body = event_data diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 06db4f00..ed0a724e 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -56,7 +56,7 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) if @config.offline? || !@config.send_events @event_processor = NullEventProcessor.new else - @event_processor = EventProcessor.new(sdk_key, config, diagnostic_accumulator) + @event_processor = EventProcessor.new(sdk_key, config, nil, diagnostic_accumulator) end if @config.use_ldd? diff --git a/spec/event_sender_spec.rb b/spec/event_sender_spec.rb index e99761b7..0519aebb 100644 --- a/spec/event_sender_spec.rb +++ b/spec/event_sender_spec.rb @@ -27,7 +27,7 @@ def with_sender_and_server with_sender_and_server do |es, server| server.setup_ok_response("/bulk", "") - result = es.send_event_data(fake_data, false) + result = es.send_event_data(fake_data, "", false) expect(result.success).to be true expect(result.must_shutdown).to be false @@ -49,8 +49,8 @@ def with_sender_and_server with_sender_and_server do |es, server| server.setup_ok_response("/bulk", "") - result1 = es.send_event_data(fake_data, false) - result2 = es.send_event_data(fake_data, false) + result1 = es.send_event_data(fake_data, "", false) + result2 = es.send_event_data(fake_data, "", false) expect(result1.success).to be true expect(result2.success).to be true @@ -66,7 +66,7 @@ def with_sender_and_server with_sender_and_server do |es, server| server.setup_ok_response("/diagnostic", "") - result = es.send_event_data(fake_data, true) + result = es.send_event_data(fake_data, "", true) expect(result.success).to be true expect(result.must_shutdown).to be false @@ -94,7 +94,7 @@ def with_sender_and_server es = make_sender(server) - result = es.send_event_data(fake_data, false) + result = es.send_event_data(fake_data, "", false) expect(result.success).to be true @@ -116,7 +116,7 @@ def with_sender_and_server res.status = req_count == 2 ? 200 : status end - result = es.send_event_data(fake_data, false) + result = es.send_event_data(fake_data, "", false) expect(result.success).to be true expect(result.must_shutdown).to be false @@ -141,7 +141,7 @@ def with_sender_and_server res.status = req_count == 3 ? 200 : status end - result = es.send_event_data(fake_data, false) + result = es.send_event_data(fake_data, "", false) expect(result.success).to be false expect(result.must_shutdown).to be false @@ -164,7 +164,7 @@ def with_sender_and_server res.status = status end - result = es.send_event_data(fake_data, false) + result = es.send_event_data(fake_data, "", false) expect(result.success).to be false expect(result.must_shutdown).to be true diff --git a/spec/events_spec.rb b/spec/events_spec.rb index a36fa95f..c32eeb29 100644 --- a/spec/events_spec.rb +++ b/spec/events_spec.rb @@ -578,7 +578,7 @@ def initialize @diagnostic_payloads = Queue.new end - def send_event_data(data, is_diagnostic) + def send_event_data(data, description, is_diagnostic) (is_diagnostic ? 
@diagnostic_payloads : @analytics_payloads).push(JSON.parse(data, symbolize_names: true)) @result end diff --git a/spec/ldclient_end_to_end_spec.rb b/spec/ldclient_end_to_end_spec.rb new file mode 100644 index 00000000..b93a98b4 --- /dev/null +++ b/spec/ldclient_end_to_end_spec.rb @@ -0,0 +1,123 @@ +require "http_util" +require "spec_helper" + + +SDK_KEY = "sdk-key" + +USER = { key: 'userkey' } + +ALWAYS_TRUE_FLAG = { key: 'flagkey', version: 1, on: false, offVariation: 1, variations: [ false, true ] } +DATA_WITH_ALWAYS_TRUE_FLAG = { + flags: { ALWAYS_TRUE_FLAG[:key ].to_sym => ALWAYS_TRUE_FLAG }, + segments: {} +} +PUT_EVENT_WITH_ALWAYS_TRUE_FLAG = "event: put\ndata:{\"data\":#{DATA_WITH_ALWAYS_TRUE_FLAG.to_json}}\n\n'" + +def with_client(config) + client = LaunchDarkly::LDClient.new(SDK_KEY, config) + begin + yield client + ensure + client.close + end +end + +module LaunchDarkly + # Note that we can't do end-to-end tests in streaming mode until we have a test server that can do streaming + # responses, which is difficult in WEBrick. + + describe "LDClient end-to-end" do + it "starts in polling mode" do + with_server do |poll_server| + poll_server.setup_ok_response("/sdk/latest-all", DATA_WITH_ALWAYS_TRUE_FLAG.to_json, "application/json") + + config = Config.new( + stream: false, + base_uri: poll_server.base_uri.to_s, + send_events: false, + logger: NullLogger.new + ) + with_client(config) do |client| + expect(client.initialized?).to be true + expect(client.variation(ALWAYS_TRUE_FLAG[:key], USER, false)).to be true + end + end + end + + it "fails in polling mode with 401 error" do + with_server do |poll_server| + poll_server.setup_status_response("/sdk/latest-all", 401) + + config = Config.new( + stream: false, + base_uri: poll_server.base_uri.to_s, + send_events: false, + logger: NullLogger.new + ) + with_client(config) do |client| + expect(client.initialized?).to be false + expect(client.variation(ALWAYS_TRUE_FLAG[:key], USER, false)).to be false + end + end + end + + it "sends event without diagnostics" do + with_server do |poll_server| + with_server do |events_server| + events_server.setup_ok_response("/bulk", "") + poll_server.setup_ok_response("/sdk/latest-all", '{"flags":{},"segments":{}}', "application/json") + + config = Config.new( + stream: false, + base_uri: poll_server.base_uri.to_s, + events_uri: events_server.base_uri.to_s, + diagnostic_opt_out: true, + logger: NullLogger.new + ) + with_client(config) do |client| + client.identify(USER) + client.flush + + req, body = events_server.await_request_with_body + expect(req.header['authorization']).to eq [ SDK_KEY ] + data = JSON.parse(body) + expect(data.length).to eq 1 + expect(data[0]["kind"]).to eq "identify" + end + end + end + end + + it "sends diagnostic event" do + with_server do |poll_server| + with_server do |events_server| + events_server.setup_ok_response("/bulk", "") + events_server.setup_ok_response("/diagnostic", "") + poll_server.setup_ok_response("/sdk/latest-all", '{"flags":{},"segments":{}}', "application/json") + + config = Config.new( + stream: false, + base_uri: poll_server.base_uri.to_s, + events_uri: events_server.base_uri.to_s, + logger: NullLogger.new + ) + with_client(config) do |client| + user = { key: 'userkey' } + client.identify(user) + client.flush + + req0, body0 = events_server.await_request_with_body + req1, body1 = events_server.await_request_with_body + req = req0.path == "/diagnostic" ? req0 : req1 + body = req0.path == "/diagnostic" ? 
body0 : body1 + expect(req.header['authorization']).to eq [ SDK_KEY ] + data = JSON.parse(body) + expect(data["kind"]).to eq "diagnostic-init" + end + end + end + end + + # TODO: TLS tests with self-signed cert + end +end From 6f0e1e5ee54eddbc0fb7682d06d93ba3947b232a Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 27 Apr 2020 14:13:05 -0700 Subject: [PATCH 155/182] remove install-time openssl check that breaks if you don't have rake --- ext/mkrf_conf.rb | 11 ----------- launchdarkly-server-sdk.gemspec | 1 - 2 files changed, 12 deletions(-) delete mode 100644 ext/mkrf_conf.rb diff --git a/ext/mkrf_conf.rb b/ext/mkrf_conf.rb deleted file mode 100644 index 23c2c7b6..00000000 --- a/ext/mkrf_conf.rb +++ /dev/null @@ -1,11 +0,0 @@ -require "rubygems" - - -# From http://stackoverflow.com/questions/5830835/how-to-add-openssl-dependency-to-gemspec -# the whole reason this file exists: to return an error if openssl -# isn't installed. -require "openssl" - -f = File.open(File.join(File.dirname(__FILE__), "Rakefile"), "w") # create dummy rakefile to indicate success -f.write("task :default\n") -f.close diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index 237474ef..67b39daf 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -19,7 +19,6 @@ Gem::Specification.new do |spec| spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) } spec.test_files = spec.files.grep(%r{^(test|spec|features)/}) spec.require_paths = ["lib"] - spec.extensions = 'ext/mkrf_conf.rb' spec.add_development_dependency "aws-sdk-dynamodb", "~> 1.18" spec.add_development_dependency "bundler", "~> 1.7" From e35f8abbe8f256027a33125f0485b1307c79b9b8 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 27 Apr 2020 15:12:03 -0700 Subject: [PATCH 156/182] treat comparison with wrong data type as a non-match, not an exception (#134) --- lib/ldclient-rb/evaluation.rb | 25 +++++++++++++++++-------- spec/evaluation_spec.rb | 12 ++++++------ 2 files changed, 23 insertions(+), 14 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index d0d2aa38..3c18e7ff 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -140,35 +140,44 @@ def self.comparator(converter) end, endsWith: lambda do |a, b| - (a.is_a? String) && (a.end_with? b) + (a.is_a? String) && (b.is_a? String) && (a.end_with? b) end, startsWith: lambda do |a, b| - (a.is_a? String) && (a.start_with? b) + (a.is_a? String) && (b.is_a? String) && (a.start_with? b) end, matches: lambda do |a, b| - (b.is_a? String) && !(Regexp.new b).match(a).nil? + if (b.is_a? String) && (b.is_a? String) + begin + re = Regexp.new b + !re.match(a).nil? + rescue + false + end + else + false + end end, contains: lambda do |a, b| - (a.is_a? String) && (a.include? b) + (a.is_a? String) && (b.is_a? String) && (a.include? b) end, lessThan: lambda do |a, b| - (a.is_a? Numeric) && (a < b) + (a.is_a? Numeric) && (b.is_a? Numeric) && (a < b) end, lessThanOrEqual: lambda do |a, b| - (a.is_a? Numeric) && (a <= b) + (a.is_a? Numeric) && (b.is_a? Numeric) && (a <= b) end, greaterThan: lambda do |a, b| - (a.is_a? Numeric) && (a > b) + (a.is_a? Numeric) && (b.is_a? Numeric) && (a > b) end, greaterThanOrEqual: lambda do |a, b| - (a.is_a? Numeric) && (a >= b) + (a.is_a? Numeric) && (b.is_a? 
Numeric) && (a >= b) end, before: comparator(DATE_OPERAND) { |n| n < 0 }, diff --git a/spec/evaluation_spec.rb b/spec/evaluation_spec.rb index 14d5ed80..b8bed817 100644 --- a/spec/evaluation_spec.rb +++ b/spec/evaluation_spec.rb @@ -495,13 +495,13 @@ def boolean_flag_with_clauses(clauses) # mixed strings and numbers [ :in, "99", 99, false ], [ :in, 99, "99", false ], - #[ :contains, "99", 99, false ], # currently throws exception - would return false in Java SDK - #[ :startsWith, "99", 99, false ], # currently throws exception - would return false in Java SDK - #[ :endsWith, "99", 99, false ] # currently throws exception - would return false in Java SDK + [ :contains, "99", 99, false ], + [ :startsWith, "99", 99, false ], + [ :endsWith, "99", 99, false ], [ :lessThanOrEqual, "99", 99, false ], - #[ :lessThanOrEqual, 99, "99", false ], # currently throws exception - would return false in Java SDK + [ :lessThanOrEqual, 99, "99", false ], [ :greaterThanOrEqual, "99", 99, false ], - #[ :greaterThanOrEqual, 99, "99", false ], # currently throws exception - would return false in Java SDK + [ :greaterThanOrEqual, 99, "99", false ], # regex [ :matches, "hello world", "hello.*rld", true ], @@ -509,7 +509,7 @@ def boolean_flag_with_clauses(clauses) [ :matches, "hello world", "l+", true ], [ :matches, "hello world", "(world|planet)", true ], [ :matches, "hello world", "aloha", false ], - #[ :matches, "hello world", "***not a regex", false ] # currently throws exception - same as Java SDK + [ :matches, "hello world", "***not a regex", false ], # dates [ :before, dateStr1, dateStr2, true ], From 3e55dc410280b7562ad6bdfbf05b70804e3b2272 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 27 Apr 2020 15:12:45 -0700 Subject: [PATCH 157/182] fail fast for nil SDK key when appropriate --- lib/ldclient-rb/events.rb | 1 + lib/ldclient-rb/ldclient.rb | 11 ++++++++++ spec/ldclient_spec.rb | 40 +++++++++++++++++++++++++++++++++++++ 3 files changed, 52 insertions(+) diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index 0b65f3d5..a5352a0b 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -91,6 +91,7 @@ class StopMessage < SynchronousMessage # @private class EventProcessor def initialize(sdk_key, config, client = nil, diagnostic_accumulator = nil, test_properties = nil) + raise ArgumentError, "sdk_key must not be nil" if sdk_key.nil? # see LDClient constructor comment on sdk_key @logger = config.logger @inbox = SizedQueue.new(config.capacity < 100 ? 100 : config.capacity) @flush_task = Concurrent::TimerTask.new(execution_interval: config.flush_interval) do diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index ed0a724e..1dc0cc25 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -33,6 +33,16 @@ class LDClient # @return [LDClient] The LaunchDarkly client instance # def initialize(sdk_key, config = Config.default, wait_for_sec = 5) + # Note that sdk_key is normally a required parameter, and a nil value would cause the SDK to + # fail in most configurations. However, there are some configurations where it would be OK + # (offline = true, *or* we are using LDD mode or the file data source and events are disabled + # so we're not connecting to any LD services) so rather than try to check for all of those + # up front, we will let the constructors for the data source implementations implement this + # fail-fast as appropriate, and just check here for the part regarding events. + if !config.offline? 
&& config.send_events + raise ArgumentError, "sdk_key must not be nil" if sdk_key.nil? + end + @sdk_key = sdk_key @event_factory_default = EventFactory.new(false) @@ -352,6 +362,7 @@ def create_default_data_source(sdk_key, config, diagnostic_accumulator) if config.offline? return NullUpdateProcessor.new end + raise ArgumentError, "sdk_key must not be nil" if sdk_key.nil? # see LDClient constructor comment on sdk_key requestor = Requestor.new(sdk_key, config) if config.stream? StreamProcessor.new(sdk_key, config, requestor, diagnostic_accumulator) diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 1d3bb506..40ce5a1d 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -49,6 +49,46 @@ def event_processor client.instance_variable_get(:@event_processor) end + describe "constructor requirement of non-nil sdk key" do + it "is not enforced when offline" do + subject.new(nil, offline_config) + end + + it "is not enforced if use_ldd is true and send_events is false" do + subject.new(nil, LaunchDarkly::Config.new({ use_ldd: true, send_events: false })) + end + + it "is not enforced if using file data and send_events is false" do + source = LaunchDarkly::FileDataSource.factory({}) + subject.new(nil, LaunchDarkly::Config.new({ data_source: source, send_events: false })) + end + + it "is enforced in streaming mode even if send_events is false" do + expect { + subject.new(nil, LaunchDarkly::Config.new({ send_events: false })) + }.to raise_error(ArgumentError) + end + + it "is enforced in polling mode even if send_events is false" do + expect { + subject.new(nil, LaunchDarkly::Config.new({ stream: false, send_events: false })) + }.to raise_error(ArgumentError) + end + + it "is enforced if use_ldd is true and send_events is true" do + expect { + subject.new(nil, LaunchDarkly::Config.new({ use_ldd: true })) + }.to raise_error(ArgumentError) + end + + it "is enforced if using file data and send_events is true" do + source = LaunchDarkly::FileDataSource.factory({}) + expect { + subject.new(nil, LaunchDarkly::Config.new({ data_source: source })) + }.to raise_error(ArgumentError) + end + end + describe '#variation' do feature_with_value = { key: "key", on: false, offVariation: 0, variations: ["value"], version: 100, trackEvents: true, debugEventsUntilDate: 1000 } From ad7cd7f05c44aa353fb3d5a3f36130eb62a45187 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 4 May 2020 14:25:25 -0700 Subject: [PATCH 158/182] tolerate nil value for user.custom (#137) --- lib/ldclient-rb/user_filter.rb | 5 +++-- spec/events_spec.rb | 11 +++++++++++ 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/lib/ldclient-rb/user_filter.rb b/lib/ldclient-rb/user_filter.rb index 8cbf67ca..b67f6844 100644 --- a/lib/ldclient-rb/user_filter.rb +++ b/lib/ldclient-rb/user_filter.rb @@ -15,8 +15,9 @@ def transform_user_props(user_props) user_private_attrs = Set.new((user_props[:privateAttributeNames] || []).map(&:to_sym)) filtered_user_props, removed = filter_values(user_props, user_private_attrs, ALLOWED_TOP_LEVEL_KEYS, IGNORED_TOP_LEVEL_KEYS) - if user_props.has_key?(:custom) - filtered_user_props[:custom], removed_custom = filter_values(user_props[:custom], user_private_attrs) + custom = user_props[:custom] + if !custom.nil? 
+ filtered_user_props[:custom], removed_custom = filter_values(custom, user_private_attrs) removed.merge(removed_custom) end diff --git a/spec/events_spec.rb b/spec/events_spec.rb index c32eeb29..d7854567 100644 --- a/spec/events_spec.rb +++ b/spec/events_spec.rb @@ -408,6 +408,17 @@ def with_processor_and_sender(config) end end + it "treats nil value for custom the same as an empty hash" do + with_processor_and_sender(default_config) do |ep, sender| + user_with_nil_custom = { key: "userkey", custom: nil } + e = { kind: "identify", key: "userkey", user: user_with_nil_custom } + ep.add_event(e) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly(e) + end + end + it "does a final flush when shutting down" do with_processor_and_sender(default_config) do |ep, sender| e = { kind: "identify", key: user[:key], user: user } From 4ca5ad3581ab7da19a22ad192254d676fb1641c9 Mon Sep 17 00:00:00 2001 From: Jacob Smith Date: Wed, 27 May 2020 13:05:19 -0400 Subject: [PATCH 159/182] Only shutdown the Redis pool if it is owned by the SDK (#158) * Only shutdown a Redis pool created by SDK * Make pool shutdown behavior an option --- .../impl/integrations/redis_impl.rb | 3 ++ lib/ldclient-rb/integrations/redis.rb | 1 + lib/ldclient-rb/redis_store.rb | 1 + spec/redis_feature_store_spec.rb | 37 +++++++++++++++++-- 4 files changed, 38 insertions(+), 4 deletions(-) diff --git a/lib/ldclient-rb/impl/integrations/redis_impl.rb b/lib/ldclient-rb/impl/integrations/redis_impl.rb index 107340f8..876f4240 100644 --- a/lib/ldclient-rb/impl/integrations/redis_impl.rb +++ b/lib/ldclient-rb/impl/integrations/redis_impl.rb @@ -33,6 +33,8 @@ def initialize(opts) @pool = opts[:pool] || ConnectionPool.new(size: max_connections) do ::Redis.new(@redis_opts) end + # shutdown pool on close unless the client passed a custom pool and specified not to shutdown + @pool_shutdown_on_close = (!opts[:pool] || opts.fetch(:pool_shutdown_on_close, true)) @prefix = opts[:prefix] || LaunchDarkly::Integrations::Redis::default_prefix @logger = opts[:logger] || Config.default_logger @test_hook = opts[:test_hook] # used for unit tests, deliberately undocumented @@ -118,6 +120,7 @@ def initialized_internal? def stop if @stopped.make_true + return unless @pool_shutdown_on_close @pool.shutdown { |redis| redis.close } end end diff --git a/lib/ldclient-rb/integrations/redis.rb b/lib/ldclient-rb/integrations/redis.rb index 7e447657..396c1b35 100644 --- a/lib/ldclient-rb/integrations/redis.rb +++ b/lib/ldclient-rb/integrations/redis.rb @@ -45,6 +45,7 @@ def self.default_prefix # @option opts [Integer] :expiration (15) expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache # @option opts [Object] :pool custom connection pool, if desired + # @option opts [Boolean] :pool_shutdown_on_close whether calling `close` should shutdown the custom connection pool. 
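The :pool_shutdown_on_close option documented on the line above is for callers who pass in a connection pool that the rest of the application also uses. A usage sketch, assuming a Redis instance reachable at the default local address and the same ConnectionPool wiring the spec below exercises:

    require "connection_pool"
    require "redis"
    require "ldclient-rb"

    # A pool owned by the application and shared with code outside the SDK.
    shared_pool = ConnectionPool.new(size: 5, timeout: 1) do
      Redis.new(url: "redis://localhost:6379")
    end

    store = LaunchDarkly::Integrations::Redis.new_feature_store(
      pool: shared_pool,
      pool_shutdown_on_close: false  # SDK shutdown must not close the shared pool
    )

    config = LaunchDarkly::Config.new(feature_store: store)
    client = LaunchDarkly::LDClient.new("sdk-key", config)
    # ... use the client ...
    client.close  # leaves shared_pool running because of the option above
    shared_pool.shutdown { |conn| conn.close }  # the application closes it when done
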
# @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object # def self.new_feature_store(opts) diff --git a/lib/ldclient-rb/redis_store.rb b/lib/ldclient-rb/redis_store.rb index 48632411..b94e61f2 100644 --- a/lib/ldclient-rb/redis_store.rb +++ b/lib/ldclient-rb/redis_store.rb @@ -35,6 +35,7 @@ class RedisFeatureStore # @option opts [Integer] :expiration expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity maximum number of feature flags (or related objects) to cache locally # @option opts [Object] :pool custom connection pool, if desired + # @option opts [Boolean] :pool_shutdown_on_close whether calling `close` should shutdown the custom connection pool. # def initialize(opts = {}) core = LaunchDarkly::Impl::Integrations::Redis::RedisFeatureStoreCore.new(opts) diff --git a/spec/redis_feature_store_spec.rb b/spec/redis_feature_store_spec.rb index cf69f334..e3a179b1 100644 --- a/spec/redis_feature_store_spec.rb +++ b/spec/redis_feature_store_spec.rb @@ -1,3 +1,4 @@ +require "connection_pool" require "feature_store_spec_base" require "json" require "redis" @@ -27,11 +28,11 @@ def clear_all_data describe LaunchDarkly::RedisFeatureStore do subject { LaunchDarkly::RedisFeatureStore } - + break if ENV['LD_SKIP_DATABASE_TESTS'] == '1' # These tests will all fail if there isn't a Redis instance running on the default port. - + context "real Redis with local cache" do include_examples "feature_store", method(:create_redis_store), method(:clear_all_data) end @@ -59,7 +60,7 @@ def make_concurrent_modifier_test_hook(other_client, flag, start_version, end_ve flag = { key: "foo", version: 1 } test_hook = make_concurrent_modifier_test_hook(other_client, flag, 2, 4) store = create_redis_store({ test_hook: test_hook }) - + begin store.init(LaunchDarkly::FEATURES => { flag[:key] => flag }) @@ -77,7 +78,7 @@ def make_concurrent_modifier_test_hook(other_client, flag, start_version, end_ve flag = { key: "foo", version: 1 } test_hook = make_concurrent_modifier_test_hook(other_client, flag, 3, 3) store = create_redis_store({ test_hook: test_hook }) - + begin store.init(LaunchDarkly::FEATURES => { flag[:key] => flag }) @@ -89,4 +90,32 @@ def make_concurrent_modifier_test_hook(other_client, flag, start_version, end_ve other_client.close end end + + it "shuts down a custom Redis pool by default" do + unowned_pool = ConnectionPool.new(size: 1, timeout: 1) { Redis.new({ url: "redis://localhost:6379" }) } + store = create_redis_store({ pool: unowned_pool }) + + begin + store.init(LaunchDarkly::FEATURES => { }) + store.stop + + expect { unowned_pool.with {} }.to raise_error(ConnectionPool::PoolShuttingDownError) + ensure + unowned_pool.shutdown { |conn| conn.close } + end + end + + it "doesn't shut down a custom Redis pool if pool_shutdown_on_close = false" do + unowned_pool = ConnectionPool.new(size: 1, timeout: 1) { Redis.new({ url: "redis://localhost:6379" }) } + store = create_redis_store({ pool: unowned_pool, pool_shutdown_on_close: false }) + + begin + store.init(LaunchDarkly::FEATURES => { }) + store.stop + + expect { unowned_pool.with {} }.not_to raise_error(ConnectionPool::PoolShuttingDownError) + ensure + unowned_pool.shutdown { |conn| conn.close } + end + end end From 06d55d5d8935a303d041cf6003fc185d7ec747ae Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 27 May 2020 11:51:03 -0700 Subject: [PATCH 160/182] improve doc comment --- lib/ldclient-rb/integrations/redis.rb | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git 
a/lib/ldclient-rb/integrations/redis.rb b/lib/ldclient-rb/integrations/redis.rb index 396c1b35..22bad6ef 100644 --- a/lib/ldclient-rb/integrations/redis.rb +++ b/lib/ldclient-rb/integrations/redis.rb @@ -45,7 +45,9 @@ def self.default_prefix # @option opts [Integer] :expiration (15) expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache # @option opts [Object] :pool custom connection pool, if desired - # @option opts [Boolean] :pool_shutdown_on_close whether calling `close` should shutdown the custom connection pool. + # @option opts [Boolean] :pool_shutdown_on_close whether calling `close` should shutdown the custom connection pool; + # this is true by default, and should be set to false only if you are managing the pool yourself and want its + # lifecycle to be independent of the SDK client # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object # def self.new_feature_store(opts) From 77bf917150e1fd2735fe906287e5316b1033b730 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 26 Jun 2020 18:37:52 -0700 Subject: [PATCH 161/182] remove support for indirect/patch and indirect/put (#138) --- lib/ldclient-rb/ldclient.rb | 4 ++-- lib/ldclient-rb/requestor.rb | 8 -------- lib/ldclient-rb/stream.rb | 25 +------------------------ spec/requestor_spec.rb | 34 ---------------------------------- spec/stream_spec.rb | 17 +---------------- 5 files changed, 4 insertions(+), 84 deletions(-) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 1dc0cc25..7ea48345 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -363,12 +363,12 @@ def create_default_data_source(sdk_key, config, diagnostic_accumulator) return NullUpdateProcessor.new end raise ArgumentError, "sdk_key must not be nil" if sdk_key.nil? # see LDClient constructor comment on sdk_key - requestor = Requestor.new(sdk_key, config) if config.stream? 
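This hunk reflects the point of PATCH 161: with the indirect/put and indirect/patch stream messages gone (see the stream.rb and requestor.rb hunks below), the streaming data source no longer needs a Requestor to fetch flags on demand, so only polling mode constructs one. The resulting shape of create_default_data_source, reduced to its two branches for illustration (logging lines omitted):

    if config.stream?
      StreamProcessor.new(sdk_key, config, diagnostic_accumulator)
    else
      # Only polling mode still makes direct requests for flag data.
      requestor = Requestor.new(sdk_key, config)
      PollingProcessor.new(config, requestor)
    end
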
- StreamProcessor.new(sdk_key, config, requestor, diagnostic_accumulator) + StreamProcessor.new(sdk_key, config, diagnostic_accumulator) else config.logger.info { "Disabling streaming API" } config.logger.warn { "You should only disable the streaming API if instructed to do so by LaunchDarkly support" } + requestor = Requestor.new(sdk_key, config) PollingProcessor.new(config, requestor) end end diff --git a/lib/ldclient-rb/requestor.rb b/lib/ldclient-rb/requestor.rb index eae0a193..378a1a35 100644 --- a/lib/ldclient-rb/requestor.rb +++ b/lib/ldclient-rb/requestor.rb @@ -26,14 +26,6 @@ def initialize(sdk_key, config) @cache = @config.cache_store end - def request_flag(key) - make_request("/sdk/latest-flags/" + key) - end - - def request_segment(key) - make_request("/sdk/latest-segments/" + key) - end - def request_all_data() make_request("/sdk/latest-all") end diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index e27fad32..00791eb3 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -10,10 +10,6 @@ module LaunchDarkly # @private DELETE = :delete # @private - INDIRECT_PUT = :'indirect/put' - # @private - INDIRECT_PATCH = :'indirect/patch' - # @private READ_TIMEOUT_SECONDS = 300 # 5 minutes; the stream should send a ping every 3 minutes # @private @@ -24,11 +20,10 @@ module LaunchDarkly # @private class StreamProcessor - def initialize(sdk_key, config, requestor, diagnostic_accumulator = nil) + def initialize(sdk_key, config, diagnostic_accumulator = nil) @sdk_key = sdk_key @config = config @feature_store = config.feature_store - @requestor = requestor @initialized = Concurrent::AtomicBoolean.new(false) @started = Concurrent::AtomicBoolean.new(false) @stopped = Concurrent::AtomicBoolean.new(false) @@ -112,24 +107,6 @@ def process_message(message) break end end - elsif method == INDIRECT_PUT - all_data = @requestor.request_all_data - @feature_store.init({ - FEATURES => all_data[:flags], - SEGMENTS => all_data[:segments] - }) - @initialized.make_true - @config.logger.info { "[LDClient] Stream initialized (via indirect message)" } - elsif method == INDIRECT_PATCH - key = key_for_path(FEATURES, message.data) - if key - @feature_store.upsert(FEATURES, @requestor.request_flag(key)) - else - key = key_for_path(SEGMENTS, message.data) - if key - @feature_store.upsert(SEGMENTS, @requestor.request_segment(key)) - end - end else @config.logger.warn { "[LDClient] Unknown message received: #{method}" } end diff --git a/spec/requestor_spec.rb b/spec/requestor_spec.rb index 6833ea1f..6751517a 100644 --- a/spec/requestor_spec.rb +++ b/spec/requestor_spec.rb @@ -193,38 +193,4 @@ def with_requestor(base_uri, opts = {}) end end end - - describe "request_flag" do - it "uses expected URI and headers" do - with_server do |server| - with_requestor(server.base_uri.to_s) do |requestor| - server.setup_ok_response("/", "{}") - requestor.request_flag("key") - expect(server.requests.count).to eq 1 - expect(server.requests[0].unparsed_uri).to eq "/sdk/latest-flags/key" - expect(server.requests[0].header).to include({ - "authorization" => [ $sdk_key ], - "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ] - }) - end - end - end - end - - describe "request_segment" do - it "uses expected URI and headers" do - with_server do |server| - with_requestor(server.base_uri.to_s) do |requestor| - server.setup_ok_response("/", "{}") - requestor.request_segment("key") - expect(server.requests.count).to eq 1 - expect(server.requests[0].unparsed_uri).to eq "/sdk/latest-segments/key" - 
expect(server.requests[0].header).to include({ - "authorization" => [ $sdk_key ], - "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ] - }) - end - end - end - end end diff --git a/spec/stream_spec.rb b/spec/stream_spec.rb index 648833ff..39c678c4 100644 --- a/spec/stream_spec.rb +++ b/spec/stream_spec.rb @@ -4,8 +4,7 @@ describe LaunchDarkly::StreamProcessor do subject { LaunchDarkly::StreamProcessor } let(:config) { LaunchDarkly::Config.new } - let(:requestor) { double() } - let(:processor) { subject.new("sdk_key", config, requestor) } + let(:processor) { subject.new("sdk_key", config) } describe '#process_message' do let(:put_message) { SSE::StreamEvent.new(:put, '{"data":{"flags":{"asdf": {"key": "asdf"}},"segments":{"segkey": {"key": "segkey"}}}}') } @@ -13,8 +12,6 @@ let(:patch_seg_message) { SSE::StreamEvent.new(:patch, '{"path": "/segments/key", "data": {"key": "asdf", "version": 1}}') } let(:delete_flag_message) { SSE::StreamEvent.new(:delete, '{"path": "/flags/key", "version": 2}') } let(:delete_seg_message) { SSE::StreamEvent.new(:delete, '{"path": "/segments/key", "version": 2}') } - let(:indirect_patch_flag_message) { SSE::StreamEvent.new(:'indirect/patch', "/flags/key") } - let(:indirect_patch_segment_message) { SSE::StreamEvent.new(:'indirect/patch', "/segments/key") } it "will accept PUT methods" do processor.send(:process_message, put_message) @@ -39,18 +36,6 @@ processor.send(:process_message, delete_seg_message) expect(config.feature_store.get(LaunchDarkly::SEGMENTS, "key")).to eq(nil) end - it "will accept INDIRECT PATCH method for flags" do - flag = { key: 'key', version: 1 } - allow(requestor).to receive(:request_flag).with(flag[:key]).and_return(flag) - processor.send(:process_message, indirect_patch_flag_message); - expect(config.feature_store.get(LaunchDarkly::FEATURES, flag[:key])).to eq(flag) - end - it "will accept INDIRECT PATCH method for segments" do - segment = { key: 'key', version: 1 } - allow(requestor).to receive(:request_segment).with(segment[:key]).and_return(segment) - processor.send(:process_message, indirect_patch_segment_message); - expect(config.feature_store.get(LaunchDarkly::SEGMENTS, segment[:key])).to eq(segment) - end it "will log a warning if the method is not recognized" do expect(processor.instance_variable_get(:@config).logger).to receive :warn processor.send(:process_message, SSE::StreamEvent.new(type: :get, data: "", id: nil)) From cf7c8a7830e72a18f6db24c72caa0b1e0a2619cd Mon Sep 17 00:00:00 2001 From: Elliot <35050275+Apache-HB@users.noreply.github.com> Date: Mon, 9 Nov 2020 12:46:49 -0800 Subject: [PATCH 162/182] update to json 2.3.1 (#139) * update json dep to 2.3.x to fix CVE --- Gemfile.lock | 99 +++++++++++++++------------------ launchdarkly-server-sdk.gemspec | 13 ++++- spec/spec_helper.rb | 3 - 3 files changed, 54 insertions(+), 61 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index c04d73aa..54bb3bc9 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -3,92 +3,81 @@ PATH specs: launchdarkly-server-sdk (5.8.0) concurrent-ruby (~> 1.0) - json (>= 1.8, < 3) + json (~> 2.3.1) ld-eventsource (= 1.0.3) semantic (~> 1.6) GEM remote: https://rubygems.org/ specs: - aws-eventstream (1.0.1) - aws-partitions (1.128.0) - aws-sdk-core (3.44.2) - aws-eventstream (~> 1.0) - aws-partitions (~> 1.0) - aws-sigv4 (~> 1.0) + aws-eventstream (1.1.0) + aws-partitions (1.388.0) + aws-sdk-core (3.109.1) + aws-eventstream (~> 1, >= 1.0.2) + aws-partitions (~> 1, >= 1.239.0) + aws-sigv4 (~> 1.1) jmespath (~> 1.0) - aws-sdk-dynamodb (1.19.0) - 
aws-sdk-core (~> 3, >= 3.39.0) - aws-sigv4 (~> 1.0) - aws-sigv4 (1.0.3) - codeclimate-test-reporter (0.6.0) - simplecov (>= 0.7.1, < 1.0.0) - concurrent-ruby (1.1.6) - connection_pool (2.2.1) - diff-lcs (1.3) - diplomat (2.0.2) - faraday (~> 0.9) - json - docile (1.1.5) - faraday (0.15.4) + aws-sdk-dynamodb (1.55.0) + aws-sdk-core (~> 3, >= 3.109.0) + aws-sigv4 (~> 1.1) + aws-sigv4 (1.2.2) + aws-eventstream (~> 1, >= 1.0.2) + concurrent-ruby (1.1.7) + connection_pool (2.2.3) + deep_merge (1.2.1) + diff-lcs (1.4.4) + diplomat (2.4.2) + deep_merge (~> 1.0, >= 1.0.1) + faraday (>= 0.9, < 1.1.0) + faraday (0.17.3) multipart-post (>= 1.2, < 3) - ffi (1.9.25) - ffi (1.9.25-java) + ffi (1.12.0) hitimes (1.3.1) - hitimes (1.3.1-java) http_tools (0.4.5) jmespath (1.4.0) - json (1.8.6) - json (1.8.6-java) + json (2.3.1) ld-eventsource (1.0.3) concurrent-ruby (~> 1.0) http_tools (~> 0.4.5) socketry (~> 0.5.1) - listen (3.1.5) - rb-fsevent (~> 0.9, >= 0.9.4) - rb-inotify (~> 0.9, >= 0.9.7) - ruby_dep (~> 1.2) - multipart-post (2.0.0) - rb-fsevent (0.10.3) - rb-inotify (0.9.10) - ffi (>= 0.5.0, < 2) + listen (3.2.1) + rb-fsevent (~> 0.10, >= 0.10.3) + rb-inotify (~> 0.9, >= 0.9.10) + multipart-post (2.1.1) + rb-fsevent (0.10.4) + rb-inotify (0.10.1) + ffi (~> 1.0) redis (3.3.5) - rspec (3.7.0) - rspec-core (~> 3.7.0) - rspec-expectations (~> 3.7.0) - rspec-mocks (~> 3.7.0) - rspec-core (3.7.1) - rspec-support (~> 3.7.0) - rspec-expectations (3.7.0) + rspec (3.9.0) + rspec-core (~> 3.9.0) + rspec-expectations (~> 3.9.0) + rspec-mocks (~> 3.9.0) + rspec-core (3.9.3) + rspec-support (~> 3.9.3) + rspec-expectations (3.9.3) diff-lcs (>= 1.2.0, < 2.0) - rspec-support (~> 3.7.0) - rspec-mocks (3.7.0) + rspec-support (~> 3.9.0) + rspec-mocks (3.9.1) diff-lcs (>= 1.2.0, < 2.0) - rspec-support (~> 3.7.0) - rspec-support (3.7.0) + rspec-support (~> 3.9.0) + rspec-support (3.9.4) rspec_junit_formatter (0.3.0) rspec-core (>= 2, < 4, != 2.12.0) - ruby_dep (1.5.0) semantic (1.6.1) - simplecov (0.15.1) - docile (~> 1.1.0) - json (>= 1.8, < 3) - simplecov-html (~> 0.10.0) - simplecov-html (0.10.2) socketry (0.5.1) hitimes (~> 1.2) - timecop (0.9.1) + timecop (0.9.2) PLATFORMS - java ruby DEPENDENCIES aws-sdk-dynamodb (~> 1.18) - bundler (~> 1.7) - codeclimate-test-reporter (~> 0) + bundler (~> 1.17) connection_pool (>= 2.1.2) diplomat (>= 2.0.2) + faraday (~> 0.17) + ffi (<= 1.12) launchdarkly-server-sdk! 
listen (~> 3.0) redis (~> 3.3.5) diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index 67b39daf..b8493985 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -21,18 +21,25 @@ Gem::Specification.new do |spec| spec.require_paths = ["lib"] spec.add_development_dependency "aws-sdk-dynamodb", "~> 1.18" - spec.add_development_dependency "bundler", "~> 1.7" + spec.add_development_dependency "bundler", "~> 1.17" spec.add_development_dependency "rspec", "~> 3.2" - spec.add_development_dependency "codeclimate-test-reporter", "~> 0" spec.add_development_dependency "diplomat", ">= 2.0.2" spec.add_development_dependency "redis", "~> 3.3.5" spec.add_development_dependency "connection_pool", ">= 2.1.2" spec.add_development_dependency "rspec_junit_formatter", "~> 0.3.0" spec.add_development_dependency "timecop", "~> 0.9.1" spec.add_development_dependency "listen", "~> 3.0" # see file_data_source.rb + # these are transitive dependencies of listen and consul respectively + # we constrain them here to make sure the ruby 2.2, 2.3, and 2.4 CI + # cases all pass + spec.add_development_dependency "ffi", "<= 1.12" # >1.12 doesnt support ruby 2.2 + spec.add_development_dependency "faraday", "~> 0.17" # >=0.18 doesnt support ruby 2.2 - spec.add_runtime_dependency "json", [">= 1.8", "< 3"] spec.add_runtime_dependency "semantic", "~> 1.6" spec.add_runtime_dependency "concurrent-ruby", "~> 1.0" spec.add_runtime_dependency "ld-eventsource", "1.0.3" + + # lock json to 2.3.x as ruby libraries often remove + # support for older ruby versions in minor releases + spec.add_runtime_dependency "json", "~> 2.3.1" end diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index 52926ac1..8438ecc2 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -1,6 +1,3 @@ -require "codeclimate-test-reporter" -CodeClimate::TestReporter.start - require "ldclient-rb" $null_log = ::Logger.new($stdout) From 4cc6d9cd85d68e7d38994e65a2e00cb7752ee4d1 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 6 Jan 2021 11:30:43 -0800 Subject: [PATCH 163/182] add publication of API docs on GitHub Pages (#143) --- .gitignore | 2 +- .ldrelease/build-docs.sh | 18 ++++++++++++++++++ .ldrelease/config.yml | 13 +++++++++---- .yardopts | 9 --------- README.md | 4 +++- docs/Makefile | 26 ++++++++++++++++++++++++++ docs/index.md | 9 +++++++++ scripts/gendocs.sh | 11 ----------- scripts/release.sh | 27 --------------------------- 9 files changed, 66 insertions(+), 53 deletions(-) create mode 100755 .ldrelease/build-docs.sh delete mode 100644 .yardopts create mode 100644 docs/Makefile create mode 100644 docs/index.md delete mode 100755 scripts/gendocs.sh delete mode 100755 scripts/release.sh diff --git a/.gitignore b/.gitignore index 3f9d02f2..9e998e64 100644 --- a/.gitignore +++ b/.gitignore @@ -2,7 +2,7 @@ /.yardoc /_yardoc/ /coverage/ -/doc/ +/docs/build /pkg/ /spec/reports/ /tmp/ diff --git a/.ldrelease/build-docs.sh b/.ldrelease/build-docs.sh new file mode 100755 index 00000000..3b581297 --- /dev/null +++ b/.ldrelease/build-docs.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +# doc generation is not part of Releaser's standard Ruby project template + +mkdir -p ./artifacts/ + +cd ./docs +make +cd .. + +# Releaser will pick up docs generated in CI if we put an archive of them in the +# artifacts directory and name it docs.tar.gz or docs.zip. They will be uploaded +# to GitHub Pages and also attached as release artifacts. 
There's no separate +# "publish-docs" step because the external service that also hosts them doesn't +# require an upload, it just picks up gems automatically. + +cd ./docs/build/html +tar cfz ../../../artifacts/docs.tar.gz * diff --git a/.ldrelease/config.yml b/.ldrelease/config.yml index 198d0ebc..b900daf1 100644 --- a/.ldrelease/config.yml +++ b/.ldrelease/config.yml @@ -8,10 +8,15 @@ publications: - url: https://www.rubydoc.info/gems/launchdarkly-server-sdk description: documentation -template: - name: ruby - env: - LD_SKIP_DATABASE_TESTS: 1 # Don't run Redis/Consul/DynamoDB tests in release; they are run in CI +circleci: + linux: + image: circleci/ruby:2.6.2-stretch + context: org-global + env: + LD_SKIP_DATABASE_TESTS: 1 # Don't run Redis/Consul/DynamoDB tests in release; they are run in CI + +documentation: + githubPages: true sdk: displayName: "Ruby" diff --git a/.yardopts b/.yardopts deleted file mode 100644 index 5388ac50..00000000 --- a/.yardopts +++ /dev/null @@ -1,9 +0,0 @@ ---no-private ---markup markdown ---embed-mixins -lib/*.rb -lib/**/*.rb -lib/**/**/*.rb -lib/**/**/**/*.rb -- -README.md diff --git a/README.md b/README.md index d3f99b69..bc6cf21d 100644 --- a/README.md +++ b/README.md @@ -5,6 +5,8 @@ LaunchDarkly Server-side SDK for Ruby [![Circle CI](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/master.svg?style=svg)](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/master) [![Security](https://hakiri.io/github/launchdarkly/ruby-server-sdk/master.svg)](https://hakiri.io/github/launchdarkly/ruby-server-sdk/master) +[![RubyDoc](https://img.shields.io/static/v1?label=docs+-+all+versions&message=reference&color=00add8)](https://www.rubydoc.info/gems/launchdarkly-server-sdk) +[![GitHub Pages](https://img.shields.io/static/v1?label=docs+-+latest&message=reference&color=00add8)](https://launchdarkly.github.io/ruby-server-sdk) LaunchDarkly overview ------------------------- @@ -27,7 +29,7 @@ Learn more Check out our [documentation](http://docs.launchdarkly.com) for in-depth instructions on configuring and using LaunchDarkly. You can also head straight to the [reference guide for this SDK](http://docs.launchdarkly.com/docs/ruby-sdk-reference). -Generated API documentation is on [RubyDoc.info](https://www.rubydoc.info/gems/launchdarkly-server-sdk). +Generated API documentation for all versions of the SDK is on [RubyDoc.info](https://www.rubydoc.info/gems/launchdarkly-server-sdk). The API documentation for the latest version is also on [GitHub Pages](https://launchdarkly.github.io/ruby-server-sdk). Testing ------- diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 00000000..86a33602 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,26 @@ + +ifeq ($(LD_RELEASE_VERSION),) +TITLE=LaunchDarkly Ruby SDK +else +TITLE=LaunchDarkly Ruby SDK ($(LD_RELEASE_VERSION)) +endif + +.PHONY: dependencies html + +html: dependencies + rm -rf ./build + cd .. && yard doc \ + -o docs/build/html \ + --title "$(TITLE)" \ + --no-private \ + --markup markdown \ + --embed-mixins \ + -r docs/index.md \ + lib/*.rb \ + lib/**/*.rb \ + lib/**/**/*.rb \ + lib/**/**/**/*.rb + +dependencies: + gem install --conservative yard + gem install --conservative redcarpet # provides Markdown formatting diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 00000000..4ab76d21 --- /dev/null +++ b/docs/index.md @@ -0,0 +1,9 @@ +# LaunchDarkly Server-side SDK for Ruby + +This generated API documentation lists all types and methods in the SDK. 
+ +The API documentation for the most recent SDK release is hosted on [GitHub Pages](https://launchdarkly.github.io/ruby-server-sdk). API documentation for current and past releases is hosted on [RubyDoc.info](https://www.rubydoc.info/gems/launchdarkly-server-sdk). + +Source code and readme: [GitHub](https://github.com/launchdarkly/ruby-server-sdk) + +SDK reference guide: [docs.launchdarkly.com](https://docs.launchdarkly.com/sdk/server-side/ruby) diff --git a/scripts/gendocs.sh b/scripts/gendocs.sh deleted file mode 100755 index c5ec7dcf..00000000 --- a/scripts/gendocs.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/bash - -# Use this script to generate documentation locally in ./doc so it can be proofed before release. -# After release, documentation will be visible at https://www.rubydoc.info/gems/launchdarkly-server-sdk - -gem install --conservative yard -gem install --conservative redcarpet # provides Markdown formatting - -rm -rf doc/* - -yard doc diff --git a/scripts/release.sh b/scripts/release.sh deleted file mode 100755 index 9813240c..00000000 --- a/scripts/release.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/usr/bin/env bash -# This script updates the version for the launchdarkly-server-sdk library and releases it to RubyGems -# It will only work if you have the proper credentials set up in ~/.gem/credentials - -# It takes exactly one argument: the new version. -# It should be run from the root of this git repo like this: -# ./scripts/release.sh 4.0.9 - -# When done you should commit and push the changes made. - -set -uxe -echo "Starting ruby-server-sdk release." - -VERSION=$1 - -#Update version in lib/ldclient-rb/version.rb -VERSION_RB_TEMP=./version.rb.tmp -sed "s/VERSION =.*/VERSION = \"${VERSION}\"/g" lib/ldclient-rb/version.rb > ${VERSION_RB_TEMP} -mv ${VERSION_RB_TEMP} lib/ldclient-rb/version.rb - -# Build Ruby Gem -gem build launchdarkly-server-sdk.gemspec - -# Publish Ruby Gem -gem push launchdarkly-server-sdk-${VERSION}.gem - -echo "Done with ruby-server-sdk release" \ No newline at end of file From 5831aa66c89e3231dc7e0d28b99161881049f947 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 19 Jan 2021 16:24:17 -0800 Subject: [PATCH 164/182] try fixing release metadata --- .ldrelease/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.ldrelease/config.yml b/.ldrelease/config.yml index b900daf1..fa4a0557 100644 --- a/.ldrelease/config.yml +++ b/.ldrelease/config.yml @@ -13,7 +13,7 @@ circleci: image: circleci/ruby:2.6.2-stretch context: org-global env: - LD_SKIP_DATABASE_TESTS: 1 # Don't run Redis/Consul/DynamoDB tests in release; they are run in CI + LD_SKIP_DATABASE_TESTS: "1" # Don't run Redis/Consul/DynamoDB tests in release; they are run in CI documentation: githubPages: true From 3214f713c49078d935bcae02c555b5288243ffdd Mon Sep 17 00:00:00 2001 From: Ben Woskow <48036130+bwoskow-ld@users.noreply.github.com> Date: Tue, 19 Jan 2021 17:42:47 -0800 Subject: [PATCH 165/182] update the default base url (#144) --- lib/ldclient-rb/config.rb | 6 +++--- lib/ldclient-rb/file_data_source.rb | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index 4a3116f3..211a20c7 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -15,7 +15,7 @@ class Config # # @param opts [Hash] the configuration options # @option opts [Logger] :logger See {#logger}. - # @option opts [String] :base_uri ("https://app.launchdarkly.com") See {#base_uri}. 
+ # @option opts [String] :base_uri ("https://sdk.launchdarkly.com") See {#base_uri}. # @option opts [String] :stream_uri ("https://stream.launchdarkly.com") See {#stream_uri}. # @option opts [String] :events_uri ("https://events.launchdarkly.com") See {#events_uri}. # @option opts [Integer] :capacity (10000) See {#capacity}. @@ -314,10 +314,10 @@ def self.default_capacity # # The default value for {#base_uri}. - # @return [String] "https://app.launchdarkly.com" + # @return [String] "https://sdk.launchdarkly.com" # def self.default_base_uri - "https://app.launchdarkly.com" + "https://sdk.launchdarkly.com" end # diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 6cc0dc39..76a7c226 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -51,7 +51,7 @@ def self.have_listen? # output as the starting point for your file. In Linux you would do this: # # ``` - # curl -H "Authorization: YOUR_SDK_KEY" https://app.launchdarkly.com/sdk/latest-all + # curl -H "Authorization: YOUR_SDK_KEY" https://sdk.launchdarkly.com/sdk/latest-all # ``` # # The output will look something like this (but with many more properties): From 557d2c497f92e227cc791c0cac87ce8f500af867 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 21 Jan 2021 10:56:29 -0800 Subject: [PATCH 166/182] revert renames of feature_store & update_processor --- lib/ldclient-rb/config.rb | 36 +++++---- lib/ldclient-rb/file_data_source.rb | 8 +- .../impl/integrations/consul_impl.rb | 10 +-- .../impl/integrations/dynamodb_impl.rb | 8 +- .../impl/integrations/redis_impl.rb | 14 ++-- lib/ldclient-rb/impl/store_client_wrapper.rb | 8 +- lib/ldclient-rb/impl/store_data_set_sorter.rb | 6 +- lib/ldclient-rb/in_memory_store.rb | 10 +-- lib/ldclient-rb/integrations/consul.rb | 12 +-- lib/ldclient-rb/integrations/dynamodb.rb | 20 ++--- lib/ldclient-rb/integrations/redis.rb | 22 +++--- .../integrations/util/store_wrapper.rb | 24 +++--- lib/ldclient-rb/interfaces.rb | 16 ++-- lib/ldclient-rb/ldclient.rb | 20 ++--- lib/ldclient-rb/polling.rb | 2 +- lib/ldclient-rb/redis_store.rb | 16 ++-- lib/ldclient-rb/stream.rb | 8 +- ...pec_base.rb => feature_store_spec_base.rb} | 4 +- spec/file_data_source_spec.rb | 2 +- spec/in_memory_data_store_spec.rb | 12 --- spec/in_memory_feature_store_spec.rb | 12 +++ ...e_spec.rb => consul_feature_store_spec.rb} | 12 +-- ...spec.rb => dynamodb_feature_store_spec.rb} | 12 +-- spec/ldclient_spec.rb | 76 +++++++++---------- spec/polling_spec.rb | 14 ++-- spec/redis_feature_store_spec.rb | 14 ++-- spec/stream_spec.rb | 12 +-- 27 files changed, 210 insertions(+), 200 deletions(-) rename spec/{data_store_spec_base.rb => feature_store_spec_base.rb} (97%) delete mode 100644 spec/in_memory_data_store_spec.rb create mode 100644 spec/in_memory_feature_store_spec.rb rename spec/integrations/{consul_data_store_spec.rb => consul_feature_store_spec.rb} (63%) rename spec/integrations/{dynamodb_data_store_spec.rb => dynamodb_feature_store_spec.rb} (85%) diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index 211a20c7..df0c73b4 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -23,7 +23,7 @@ class Config # @option opts [Float] :read_timeout (10) See {#read_timeout}. # @option opts [Float] :connect_timeout (2) See {#connect_timeout}. # @option opts [Object] :cache_store See {#cache_store}. - # @option opts [Object] :data_store See {#data_store}. + # @option opts [Object] :feature_store See {#feature_store}. 
# @option opts [Boolean] :use_ldd (false) See {#use_ldd?}. # @option opts [Boolean] :offline (false) See {#offline?}. # @option opts [Float] :poll_interval (30) See {#poll_interval}. @@ -35,6 +35,8 @@ class Config # @option opts [Float] :user_keys_flush_interval (300) See {#user_keys_flush_interval}. # @option opts [Boolean] :inline_users_in_events (false) See {#inline_users_in_events}. # @option opts [Object] :data_source See {#data_source}. + # @option opts [Object] :update_processor Obsolete synonym for `data_source`. + # @option opts [Object] :update_processor_factory Obsolete synonym for `data_source`. # @option opts [Boolean] :diagnostic_opt_out (false) See {#diagnostic_opt_out?}. # @option opts [Float] :diagnostic_recording_interval (900) See {#diagnostic_recording_interval}. # @option opts [String] :wrapper_name See {#wrapper_name}. @@ -50,7 +52,7 @@ def initialize(opts = {}) @flush_interval = opts[:flush_interval] || Config.default_flush_interval @connect_timeout = opts[:connect_timeout] || Config.default_connect_timeout @read_timeout = opts[:read_timeout] || Config.default_read_timeout - @data_store = opts[:data_store] || Config.default_data_store + @feature_store = opts[:feature_store] || Config.default_feature_store @stream = opts.has_key?(:stream) ? opts[:stream] : Config.default_stream @use_ldd = opts.has_key?(:use_ldd) ? opts[:use_ldd] : Config.default_use_ldd @offline = opts.has_key?(:offline) ? opts[:offline] : Config.default_offline @@ -61,7 +63,9 @@ def initialize(opts = {}) @user_keys_capacity = opts[:user_keys_capacity] || Config.default_user_keys_capacity @user_keys_flush_interval = opts[:user_keys_flush_interval] || Config.default_user_keys_flush_interval @inline_users_in_events = opts[:inline_users_in_events] || false - @data_source = opts[:data_source] + @data_source = opts[:data_source] || opts[:update_processor] || opts[:update_processor_factory] + @update_processor = opts[:update_processor] + @update_processor_factory = opts[:update_processor_factory] @diagnostic_opt_out = opts.has_key?(:diagnostic_opt_out) && opts[:diagnostic_opt_out] @diagnostic_recording_interval = opts.has_key?(:diagnostic_recording_interval) && opts[:diagnostic_recording_interval] > Config.minimum_diagnostic_recording_interval ? opts[:diagnostic_recording_interval] : Config.default_diagnostic_recording_interval @@ -103,9 +107,9 @@ def stream? # # Whether to use the LaunchDarkly relay proxy in daemon mode. In this mode, the client does not # use polling or streaming to get feature flag updates from the server, but instead reads them - # from the {#data_store data store}, which is assumed to be a database that is populated by + # from the {#feature_store feature store}, which is assumed to be a database that is populated by # a LaunchDarkly relay proxy. For more information, see ["The relay proxy"](https://docs.launchdarkly.com/v2.0/docs/the-relay-proxy) - # and ["Using a persistent data store"](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). + # and ["Using a persistent feature store"](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). # # All other properties related to streaming or polling are ignored if this option is set to true. # @@ -181,13 +185,13 @@ def offline? # # A store for feature flags and related data. The client uses it to store all data received # from LaunchDarkly, and uses the last stored data when evaluating flags. Defaults to - # {InMemoryDataStore}; for other implementations, see {LaunchDarkly::Integrations}. 
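    #
    # A minimal illustrative sketch (the SDK key and Redis URL are placeholders; the Redis
    # integration requires the `redis` and `connection_pool` gems):
    #
    #     store = LaunchDarkly::Integrations::Redis::new_feature_store(redis_url: "redis://my-server")
    #     config = LaunchDarkly::Config.new(feature_store: store)
    #     client = LaunchDarkly::LDClient.new(my_sdk_key, config)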
+ # {InMemoryFeatureStore}; for other implementations, see {LaunchDarkly::Integrations}. # - # For more information, see ["Using a persistent data store"](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). + # For more information, see ["Using a persistent feature store"](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). # - # @return [LaunchDarkly::Interfaces::DataStore] + # @return [LaunchDarkly::Interfaces::FeatureStore] # - attr_reader :data_store + attr_reader :feature_store # # True if all user attributes (other than the key) should be considered private. This means @@ -256,6 +260,12 @@ def offline? # attr_reader :data_source + # @deprecated This is replaced by {#data_source}. + attr_reader :update_processor + + # @deprecated This is replaced by {#data_source}. + attr_reader :update_processor_factory + # # Set to true to opt out of sending diagnostics data. # @@ -399,11 +409,11 @@ def self.default_use_ldd end # - # The default value for {#data_store}. - # @return [LaunchDarkly::Interfaces::DataStore] an {InMemoryDataStore} + # The default value for {#feature_store}. + # @return [LaunchDarkly::Interfaces::FeatureStore] an {InMemoryFeatureStore} # - def self.default_data_store - InMemoryDataStore.new + def self.default_feature_store + InMemoryFeatureStore.new end # diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 76a7c226..f58ddf7c 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -118,14 +118,14 @@ class FileDataSource # @return an object that can be stored in {Config#data_source} # def self.factory(options={}) - return lambda { |sdk_key, config| FileDataSourceImpl.new(config.data_store, config.logger, options) } + return lambda { |sdk_key, config| FileDataSourceImpl.new(config.feature_store, config.logger, options) } end end # @private class FileDataSourceImpl - def initialize(data_store, logger, options={}) - @data_store = data_store + def initialize(feature_store, logger, options={}) + @feature_store = feature_store @logger = logger @paths = options[:paths] || [] if @paths.is_a? String @@ -187,7 +187,7 @@ def load_all return end end - @data_store.init(all_data) + @feature_store.init(all_data) @initialized.make_true end diff --git a/lib/ldclient-rb/impl/integrations/consul_impl.rb b/lib/ldclient-rb/impl/integrations/consul_impl.rb index 34aea72c..2f186dab 100644 --- a/lib/ldclient-rb/impl/integrations/consul_impl.rb +++ b/lib/ldclient-rb/impl/integrations/consul_impl.rb @@ -5,9 +5,9 @@ module Impl module Integrations module Consul # - # Internal implementation of the Consul data store, intended to be used with CachingStoreWrapper. + # Internal implementation of the Consul feature store, intended to be used with CachingStoreWrapper. # - class ConsulDataStoreCore + class ConsulFeatureStoreCore begin require "diplomat" CONSUL_ENABLED = true @@ -17,14 +17,14 @@ class ConsulDataStoreCore def initialize(opts) if !CONSUL_ENABLED - raise RuntimeError.new("can't use Consul data store without the 'diplomat' gem") + raise RuntimeError.new("can't use Consul feature store without the 'diplomat' gem") end @prefix = (opts[:prefix] || LaunchDarkly::Integrations::Consul.default_prefix) + '/' @logger = opts[:logger] || Config.default_logger Diplomat.configuration = opts[:consul_config] if !opts[:consul_config].nil? Diplomat.configuration.url = opts[:url] if !opts[:url].nil? 
- @logger.info("ConsulDataStore: using Consul host at #{Diplomat.configuration.url}") + @logger.info("ConsulFeatureStore: using Consul host at #{Diplomat.configuration.url}") end def init_internal(all_data) @@ -90,7 +90,7 @@ def upsert_internal(kind, new_item) else old_item = Model.deserialize(kind, old_value[0]["Value"]) # Check whether the item is stale. If so, don't do the update (and return the existing item to - # DataStoreWrapper so it can be cached) + # FeatureStoreWrapper so it can be cached) if old_item[:version] >= new_item[:version] return old_item end diff --git a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb index fb3a6bd4..464eb5e4 100644 --- a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +++ b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb @@ -5,9 +5,9 @@ module Impl module Integrations module DynamoDB # - # Internal implementation of the DynamoDB data store, intended to be used with CachingStoreWrapper. + # Internal implementation of the DynamoDB feature store, intended to be used with CachingStoreWrapper. # - class DynamoDBDataStoreCore + class DynamoDBFeatureStoreCore begin require "aws-sdk-dynamodb" AWS_SDK_ENABLED = true @@ -28,7 +28,7 @@ class DynamoDBDataStoreCore def initialize(table_name, opts) if !AWS_SDK_ENABLED - raise RuntimeError.new("can't use DynamoDB data store without the aws-sdk or aws-sdk-dynamodb gem") + raise RuntimeError.new("can't use DynamoDB feature store without the aws-sdk or aws-sdk-dynamodb gem") end @table_name = table_name @@ -41,7 +41,7 @@ def initialize(table_name, opts) @client = Aws::DynamoDB::Client.new(opts[:dynamodb_opts] || {}) end - @logger.info("DynamoDBDataStore: using DynamoDB table \"#{table_name}\"") + @logger.info("DynamoDBFeatureStore: using DynamoDB table \"#{table_name}\"") end def init_internal(all_data) diff --git a/lib/ldclient-rb/impl/integrations/redis_impl.rb b/lib/ldclient-rb/impl/integrations/redis_impl.rb index 4602fcd7..a4cb1365 100644 --- a/lib/ldclient-rb/impl/integrations/redis_impl.rb +++ b/lib/ldclient-rb/impl/integrations/redis_impl.rb @@ -6,9 +6,9 @@ module Impl module Integrations module Redis # - # Internal implementation of the Redis data store, intended to be used with CachingStoreWrapper. + # Internal implementation of the Redis feature store, intended to be used with CachingStoreWrapper. 
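        # (Illustrative note: this core is not normally constructed directly. As sketched in the
        # integrations module, it is created and wrapped in a CachingStoreWrapper by the public
        # factory method, e.g.:
        #
        #     LaunchDarkly::Integrations::Redis::new_feature_store(redis_url: "redis://my-server")
        # )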
# - class RedisDataStoreCore + class RedisFeatureStoreCore begin require "redis" require "connection_pool" @@ -19,7 +19,7 @@ class RedisDataStoreCore def initialize(opts) if !REDIS_ENABLED - raise RuntimeError.new("can't use Redis data store because one of these gems is missing: redis, connection_pool") + raise RuntimeError.new("can't use Redis feature store because one of these gems is missing: redis, connection_pool") end @redis_opts = opts[:redis_opts] || Hash.new @@ -42,7 +42,7 @@ def initialize(opts) @stopped = Concurrent::AtomicBoolean.new(false) with_connection do |redis| - @logger.info("RedisDataStore: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} \ + @logger.info("RedisFeatureStore: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} \ and prefix: #{@prefix}") end end @@ -61,7 +61,7 @@ def init_internal(all_data) multi.set(inited_key, inited_key) end end - @logger.info { "RedisDataStore: initialized with #{count} items" } + @logger.info { "RedisFeatureStore: initialized with #{count} items" } end def get_internal(kind, key) @@ -97,13 +97,13 @@ def upsert_internal(kind, new_item) multi.hset(base_key, key, Model.serialize(kind, new_item)) end if result.nil? - @logger.debug { "RedisDataStore: concurrent modification detected, retrying" } + @logger.debug { "RedisFeatureStore: concurrent modification detected, retrying" } try_again = true end else final_item = old_item action = new_item[:deleted] ? "delete" : "update" - @logger.warn { "RedisDataStore: attempted to #{action} #{key} version: #{old_item[:version]} \ + @logger.warn { "RedisFeatureStore: attempted to #{action} #{key} version: #{old_item[:version]} \ in '#{kind[:namespace]}' with a version that is the same or older: #{new_item[:version]}" } end redis.unwatch diff --git a/lib/ldclient-rb/impl/store_client_wrapper.rb b/lib/ldclient-rb/impl/store_client_wrapper.rb index 8c3160f1..f0948251 100644 --- a/lib/ldclient-rb/impl/store_client_wrapper.rb +++ b/lib/ldclient-rb/impl/store_client_wrapper.rb @@ -4,19 +4,19 @@ module LaunchDarkly module Impl # - # Provides additional behavior that the client requires before or after data store operations. + # Provides additional behavior that the client requires before or after feature store operations. # Currently this just means sorting the data set for init(). In the future we may also use this # to provide an update listener capability. # - class DataStoreClientWrapper - include Interfaces::DataStore + class FeatureStoreClientWrapper + include Interfaces::FeatureStore def initialize(store) @store = store end def init(all_data) - @store.init(DataStoreDataSetSorter.sort_all_collections(all_data)) + @store.init(FeatureStoreDataSetSorter.sort_all_collections(all_data)) end def get(kind, key) diff --git a/lib/ldclient-rb/impl/store_data_set_sorter.rb b/lib/ldclient-rb/impl/store_data_set_sorter.rb index 6dad1b36..4454fe75 100644 --- a/lib/ldclient-rb/impl/store_data_set_sorter.rb +++ b/lib/ldclient-rb/impl/store_data_set_sorter.rb @@ -2,10 +2,10 @@ module LaunchDarkly module Impl # - # Implements a dependency graph ordering for data to be stored in a data store. We must use this - # on every data set that will be passed to the data store's init() method. + # Implements a dependency graph ordering for data to be stored in a feature store. We must use this + # on every data set that will be passed to the feature store's init() method. 
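      # A brief sketch of the intended call pattern, mirroring FeatureStoreClientWrapper#init:
      #
      #     sorted = FeatureStoreDataSetSorter.sort_all_collections(all_data)
      #     @store.init(sorted)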
# - class DataStoreDataSetSorter + class FeatureStoreDataSetSorter # # Returns a copy of the input hash that has the following guarantees: the iteration order of the outer # hash will be in ascending order by the VersionDataKind's :priority property (if any), and for each diff --git a/lib/ldclient-rb/in_memory_store.rb b/lib/ldclient-rb/in_memory_store.rb index d3bee07e..576d90c7 100644 --- a/lib/ldclient-rb/in_memory_store.rb +++ b/lib/ldclient-rb/in_memory_store.rb @@ -2,12 +2,12 @@ module LaunchDarkly - # These constants denote the types of data that can be stored in the data store. If + # These constants denote the types of data that can be stored in the feature store. If # we add another storable data type in the future, as long as it follows the same pattern # (having "key", "version", and "deleted" properties), we only need to add a corresponding # constant here and the existing store should be able to handle it. # - # The :priority and :get_dependency_keys properties are used by DataStoreDataSetSorter + # The :priority and :get_dependency_keys properties are used by FeatureStoreDataSetSorter # to ensure data consistency during non-atomic updates. # @private @@ -24,12 +24,12 @@ module LaunchDarkly }.freeze # - # Default implementation of the LaunchDarkly client's data store, using an in-memory + # Default implementation of the LaunchDarkly client's feature store, using an in-memory # cache. This object holds feature flags and related data received from LaunchDarkly. # Database-backed implementations are available in {LaunchDarkly::Integrations}. # - class InMemoryDataStore - include LaunchDarkly::Interfaces::DataStore + class InMemoryFeatureStore + include LaunchDarkly::Interfaces::FeatureStore def initialize @items = Hash.new diff --git a/lib/ldclient-rb/integrations/consul.rb b/lib/ldclient-rb/integrations/consul.rb index 0ecf69f8..4f32d5fd 100644 --- a/lib/ldclient-rb/integrations/consul.rb +++ b/lib/ldclient-rb/integrations/consul.rb @@ -5,7 +5,7 @@ module LaunchDarkly module Integrations module Consul # - # Default value for the `prefix` option for {new_data_store}. + # Default value for the `prefix` option for {new_feature_store}. # # @return [String] the default key prefix # @@ -14,10 +14,10 @@ def self.default_prefix end # - # Creates a Consul-backed persistent data store. + # Creates a Consul-backed persistent feature store. # # To use this method, you must first install the gem `diplomat`. Then, put the object returned by - # this method into the `data_store` property of your client configuration ({LaunchDarkly::Config}). + # this method into the `feature_store` property of your client configuration ({LaunchDarkly::Config}). 
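      #
      # A minimal usage sketch (the prefix value is a placeholder; requires the `diplomat` gem):
      #
      #     store = LaunchDarkly::Integrations::Consul::new_feature_store(prefix: "my-prefix")
      #     config = LaunchDarkly::Config.new(feature_store: store)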
# # @param opts [Hash] the configuration options # @option opts [Hash] :consul_config an instance of `Diplomat::Configuration` to replace the default @@ -27,10 +27,10 @@ def self.default_prefix # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` # @option opts [Integer] :expiration (15) expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache - # @return [LaunchDarkly::Interfaces::DataStore] a data store object + # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object # - def self.new_data_store(opts, &block) - core = LaunchDarkly::Impl::Integrations::Consul::ConsulDataStoreCore.new(opts) + def self.new_feature_store(opts, &block) + core = LaunchDarkly::Impl::Integrations::Consul::ConsulFeatureStoreCore.new(opts) return LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts) end end diff --git a/lib/ldclient-rb/integrations/dynamodb.rb b/lib/ldclient-rb/integrations/dynamodb.rb index dddf38f0..189e118f 100644 --- a/lib/ldclient-rb/integrations/dynamodb.rb +++ b/lib/ldclient-rb/integrations/dynamodb.rb @@ -5,17 +5,17 @@ module LaunchDarkly module Integrations module DynamoDB # - # Creates a DynamoDB-backed persistent data store. For more details about how and why you can - # use a persistent data store, see the + # Creates a DynamoDB-backed persistent feature store. For more details about how and why you can + # use a persistent feature store, see the # [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). # # To use this method, you must first install one of the AWS SDK gems: either `aws-sdk-dynamodb`, or - # the full `aws-sdk`. Then, put the object returned by this method into the `data_store` property + # the full `aws-sdk`. Then, put the object returned by this method into the `feature_store` property # of your client configuration ({LaunchDarkly::Config}). # - # @example Configuring the data store - # store = LaunchDarkly::Integrations::DynamoDB::new_data_store("my-table-name") - # config = LaunchDarkly::Config.new(data_store: store) + # @example Configuring the feature store + # store = LaunchDarkly::Integrations::DynamoDB::new_feature_store("my-table-name") + # config = LaunchDarkly::Config.new(feature_store: store) # client = LaunchDarkly::LDClient.new(my_sdk_key, config) # # Note that the specified table must already exist in DynamoDB. 
It must have a partition key called @@ -31,15 +31,15 @@ module DynamoDB # @param table_name [String] name of an existing DynamoDB table # @param opts [Hash] the configuration options # @option opts [Hash] :dynamodb_opts options to pass to the DynamoDB client constructor (ignored if you specify `:existing_client`) - # @option opts [Object] :existing_client an already-constructed DynamoDB client for the data store to use + # @option opts [Object] :existing_client an already-constructed DynamoDB client for the feature store to use # @option opts [String] :prefix namespace prefix to add to all keys used by LaunchDarkly # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` # @option opts [Integer] :expiration (15) expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache - # @return [LaunchDarkly::Interfaces::DataStore] a data store object + # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object # - def self.new_data_store(table_name, opts) - core = LaunchDarkly::Impl::Integrations::DynamoDB::DynamoDBDataStoreCore.new(table_name, opts) + def self.new_feature_store(table_name, opts) + core = LaunchDarkly::Impl::Integrations::DynamoDB::DynamoDBFeatureStoreCore.new(table_name, opts) return LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts) end end diff --git a/lib/ldclient-rb/integrations/redis.rb b/lib/ldclient-rb/integrations/redis.rb index 89b740a1..22bad6ef 100644 --- a/lib/ldclient-rb/integrations/redis.rb +++ b/lib/ldclient-rb/integrations/redis.rb @@ -4,7 +4,7 @@ module LaunchDarkly module Integrations module Redis # - # Default value for the `redis_url` option for {new_data_store}. This points to an instance of + # Default value for the `redis_url` option for {new_feature_store}. This points to an instance of # Redis running at `localhost` with its default port. # # @return [String] the default Redis URL @@ -14,7 +14,7 @@ def self.default_redis_url end # - # Default value for the `prefix` option for {new_data_store}. + # Default value for the `prefix` option for {new_feature_store}. # # @return [String] the default key prefix # @@ -23,17 +23,17 @@ def self.default_prefix end # - # Creates a Redis-backed persistent data store. For more details about how and why you can - # use a persistent data store, see the + # Creates a Redis-backed persistent feature store. For more details about how and why you can + # use a persistent feature store, see the # [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). # # To use this method, you must first have the `redis` and `connection-pool` gems installed. Then, - # put the object returned by this method into the `data_store` property of your + # put the object returned by this method into the `feature_store` property of your # client configuration. 
# - # @example Configuring the data store - # store = LaunchDarkly::Integrations::Redis::new_data_store(redis_url: "redis://my-server") - # config = LaunchDarkly::Config.new(data_store: store) + # @example Configuring the feature store + # store = LaunchDarkly::Integrations::Redis::new_feature_store(redis_url: "redis://my-server") + # config = LaunchDarkly::Config.new(feature_store: store) # client = LaunchDarkly::LDClient.new(my_sdk_key, config) # # @param opts [Hash] the configuration options @@ -48,10 +48,10 @@ def self.default_prefix # @option opts [Boolean] :pool_shutdown_on_close whether calling `close` should shutdown the custom connection pool; # this is true by default, and should be set to false only if you are managing the pool yourself and want its # lifecycle to be independent of the SDK client - # @return [LaunchDarkly::Interfaces::DataStore] a data store object + # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object # - def self.new_data_store(opts) - return RedisDataStore.new(opts) + def self.new_feature_store(opts) + return RedisFeatureStore.new(opts) end end end diff --git a/lib/ldclient-rb/integrations/util/store_wrapper.rb b/lib/ldclient-rb/integrations/util/store_wrapper.rb index c9ff5bcf..26318d67 100644 --- a/lib/ldclient-rb/integrations/util/store_wrapper.rb +++ b/lib/ldclient-rb/integrations/util/store_wrapper.rb @@ -6,22 +6,22 @@ module LaunchDarkly module Integrations module Util # - # CachingStoreWrapper is a partial implementation of the {LaunchDarkly::Interfaces::DataStore} + # CachingStoreWrapper is a partial implementation of the {LaunchDarkly::Interfaces::FeatureStore} # pattern that delegates part of its behavior to another object, while providing optional caching - # behavior and other logic that would otherwise be repeated in every data store implementation. + # behavior and other logic that would otherwise be repeated in every feature store implementation. # This makes it easier to create new database integrations by implementing only the database-specific # logic. # - # The mixin {DataStoreCore} describes the methods that need to be supported by the inner + # The mixin {FeatureStoreCore} describes the methods that need to be supported by the inner # implementation object. # class CachingStoreWrapper - include LaunchDarkly::Interfaces::DataStore + include LaunchDarkly::Interfaces::FeatureStore # # Creates a new store wrapper instance. # - # @param core [Object] an object that implements the {DataStoreCore} methods + # @param core [Object] an object that implements the {FeatureStoreCore} methods # @param opts [Hash] a hash that may include cache-related options; all others will be ignored # @option opts [Float] :expiration (15) cache TTL; zero means no caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache @@ -146,9 +146,9 @@ def items_if_not_deleted(items) # This module describes the methods that you must implement on your own object in order to # use {CachingStoreWrapper}. # - module DataStoreCore + module FeatureStoreCore # - # Initializes the store. This is the same as {LaunchDarkly::Interfaces::DataStore#init}, + # Initializes the store. This is the same as {LaunchDarkly::Interfaces::FeatureStore#init}, # but the wrapper will take care of updating the cache if caching is enabled. # # If possible, the store should update the entire data set atomically. If that is not possible, @@ -164,7 +164,7 @@ def init_internal(all_data) end # - # Retrieves a single entity. 
This is the same as {LaunchDarkly::Interfaces::DataStore#get} + # Retrieves a single entity. This is the same as {LaunchDarkly::Interfaces::FeatureStore#get} # except that 1. the wrapper will take care of filtering out deleted entities by checking the # `:deleted` property, so you can just return exactly what was in the data store, and 2. the # wrapper will take care of checking and updating the cache if caching is enabled. @@ -177,7 +177,7 @@ def get_internal(kind, key) end # - # Retrieves all entities of the specified kind. This is the same as {LaunchDarkly::Interfaces::DataStore#all} + # Retrieves all entities of the specified kind. This is the same as {LaunchDarkly::Interfaces::FeatureStore#all} # except that 1. the wrapper will take care of filtering out deleted entities by checking the # `:deleted` property, so you can just return exactly what was in the data store, and 2. the # wrapper will take care of checking and updating the cache if caching is enabled. @@ -190,13 +190,13 @@ def get_all_internal(kind) end # - # Attempts to add or update an entity. This is the same as {LaunchDarkly::Interfaces::DataStore#upsert} + # Attempts to add or update an entity. This is the same as {LaunchDarkly::Interfaces::FeatureStore#upsert} # except that 1. the wrapper will take care of updating the cache if caching is enabled, and 2. # the method is expected to return the final state of the entity (i.e. either the `item` # parameter if the update succeeded, or the previously existing entity in the store if the # update failed; this is used for the caching logic). # - # Note that DataStoreCore does not have a `delete` method. This is because {CachingStoreWrapper} + # Note that FeatureStoreCore does not have a `delete` method. This is because {CachingStoreWrapper} # implements `delete` by simply calling `upsert` with an item whose `:deleted` property is true. # # @param kind [Object] the kind of entity to add or update @@ -208,7 +208,7 @@ def upsert_internal(kind, item) # # Checks whether this store has been initialized. This is the same as - # {LaunchDarkly::Interfaces::DataStore#initialized?} except that there is less of a concern + # {LaunchDarkly::Interfaces::FeatureStore#initialized?} except that there is less of a concern # for efficiency, because the wrapper will use caching and memoization in order to call the method # as little as possible. # diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb index 36bdcd94..d2a9f862 100644 --- a/lib/ldclient-rb/interfaces.rb +++ b/lib/ldclient-rb/interfaces.rb @@ -5,13 +5,13 @@ module LaunchDarkly # module Interfaces # - # Mixin that defines the required methods of a data store implementation. The LaunchDarkly - # client uses the data store to persist feature flags and related objects received from + # Mixin that defines the required methods of a feature store implementation. The LaunchDarkly + # client uses the feature store to persist feature flags and related objects received from # the LaunchDarkly service. Implementations must support concurrent access and updates. - # For more about how data stores can be used, see: - # [Using a persistent data store](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). + # For more about how feature stores can be used, see: + # [Using a persistent feature store](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). 
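    # For illustration only (the values are placeholders), an entity of the shape described
    # below might look like:
    #
    #     { key: "my-flag", version: 3, deleted: false }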
# - # An entity that can be stored in a data store is a hash that can be converted to and from + # An entity that can be stored in a feature store is a hash that can be converted to and from # JSON, and that has at a minimum the following properties: `:key`, a string that is unique # among entities of the same kind; `:version`, an integer that is higher for newer data; # `:deleted`, a boolean (optional, defaults to false) that if true means this is a @@ -22,12 +22,12 @@ module Interfaces # `:namespace`, which is a short string unique to that kind. This string can be used as a # collection name or a key prefix. # - # The default implementation is {LaunchDarkly::InMemoryDataStore}. Several implementations + # The default implementation is {LaunchDarkly::InMemoryFeatureStore}. Several implementations # that use databases can be found in {LaunchDarkly::Integrations}. If you want to write a new # implementation, see {LaunchDarkly::Integrations::Util} for tools that can make this task # simpler. # - module DataStore + module FeatureStore # # Initializes (or re-initializes) the store with the specified set of entities. Any # existing entries will be removed. Implementations can assume that this data set is up to @@ -116,7 +116,7 @@ def stop # # Mixin that defines the required methods of a data source implementation. This is the # component that delivers feature flag data from LaunchDarkly to the LDClient by putting - # the data in the {DataStore}. It is expected to run concurrently on its own thread. + # the data in the {FeatureStore}. It is expected to run concurrently on its own thread. # # The client has its own standard implementation, which uses either a streaming connection or # polling depending on your configuration. Normally you will not need to use another one diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 6b78f0f8..cfa63351 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -48,13 +48,13 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) @event_factory_default = EventFactory.new(false) @event_factory_with_reasons = EventFactory.new(true) - # We need to wrap the data store object with a DataStoreClientWrapper in order to add + # We need to wrap the feature store object with a FeatureStoreClientWrapper in order to add # some necessary logic around updates. Unfortunately, we have code elsewhere that accesses - # the data store through the Config object, so we need to make a new Config that uses + # the feature store through the Config object, so we need to make a new Config that uses # the wrapped store. - @store = Impl::DataStoreClientWrapper.new(config.data_store) + @store = Impl::FeatureStoreClientWrapper.new(config.feature_store) updated_config = config.clone - updated_config.instance_variable_set(:@data_store, @store) + updated_config.instance_variable_set(:@feature_store, @store) @config = updated_config get_flag = lambda { |key| @store.get(FEATURES, key) } @@ -75,7 +75,7 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) if @config.use_ldd? @config.logger.info { "[LDClient] Started LaunchDarkly Client in LDD mode" } - return # requestor and data processor are not used in this mode + return # requestor and update processor are not used in this mode end data_source_or_factory = @config.data_source || self.method(:create_default_data_source) @@ -150,7 +150,7 @@ def secure_mode_hash(user) # given up permanently (for instance, if your SDK key is invalid). 
In the meantime, # any call to {#variation} or {#variation_detail} will behave as follows: # - # 1. It will check whether the data store already contains data (that is, you + # 1. It will check whether the feature store already contains data (that is, you # are using a database-backed store and it was populated by a previous run of this # application). If so, it will use the last known feature flag data. # @@ -365,7 +365,7 @@ def close def create_default_data_source(sdk_key, config, diagnostic_accumulator) if config.offline? - return NullDataSource.new + return NullUpdateProcessor.new end raise ArgumentError, "sdk_key must not be nil" if sdk_key.nil? # see LDClient constructor comment on sdk_key if config.stream? @@ -386,9 +386,9 @@ def evaluate_internal(key, user, default, event_factory) if !initialized? if @store.initialized? - @config.logger.warn { "[LDClient] Client has not finished initializing; using last known values from data store" } + @config.logger.warn { "[LDClient] Client has not finished initializing; using last known values from feature store" } else - @config.logger.error { "[LDClient] Client has not finished initializing; data store unavailable, returning default value" } + @config.logger.error { "[LDClient] Client has not finished initializing; feature store unavailable, returning default value" } detail = Evaluator.error_result(EvaluationReason::ERROR_CLIENT_NOT_READY, default) @event_processor.add_event(event_factory.new_unknown_flag_event(key, user, default, detail.reason)) return detail @@ -443,7 +443,7 @@ def sanitize_user(user) # Used internally when the client is offline. # @private # - class NullDataSource + class NullUpdateProcessor def start e = Concurrent::Event.new e.set diff --git a/lib/ldclient-rb/polling.rb b/lib/ldclient-rb/polling.rb index 5cbc220a..a9312413 100644 --- a/lib/ldclient-rb/polling.rb +++ b/lib/ldclient-rb/polling.rb @@ -37,7 +37,7 @@ def stop def poll all_data = @requestor.request_all_data if all_data - @config.data_store.init(all_data) + @config.feature_store.init(all_data) if @initialized.make_true @config.logger.info { "[LDClient] Polling connection initialized" } @ready.set diff --git a/lib/ldclient-rb/redis_store.rb b/lib/ldclient-rb/redis_store.rb index dc266b79..b94e61f2 100644 --- a/lib/ldclient-rb/redis_store.rb +++ b/lib/ldclient-rb/redis_store.rb @@ -3,28 +3,28 @@ module LaunchDarkly # - # An implementation of the LaunchDarkly client's data store that uses a Redis + # An implementation of the LaunchDarkly client's feature store that uses a Redis # instance. This object holds feature flags and related data received from the # streaming API. Feature data can also be further cached in memory to reduce overhead # of calls to Redis. # # To use this class, you must first have the `redis` and `connection-pool` gems - # installed. Then, create an instance and store it in the `data_store` property + # installed. Then, create an instance and store it in the `feature_store` property # of your client configuration. # # @deprecated Use the factory method in {LaunchDarkly::Integrations::Redis} instead. This specific # implementation class may be changed or removed in the future. # - class RedisDataStore - include LaunchDarkly::Interfaces::DataStore + class RedisFeatureStore + include LaunchDarkly::Interfaces::FeatureStore # Note that this class is now just a facade around CachingStoreWrapper, which is in turn delegating - # to RedisDataStoreCore where the actual database logic is. 
This class was retained for historical - # reasons, so that existing code can still call RedisDataStore.new. In the future, we will migrate + # to RedisFeatureStoreCore where the actual database logic is. This class was retained for historical + # reasons, so that existing code can still call RedisFeatureStore.new. In the future, we will migrate # away from exposing these concrete classes and use factory methods instead. # - # Constructor for a RedisDataStore instance. + # Constructor for a RedisFeatureStore instance. # # @param opts [Hash] the configuration options # @option opts [String] :redis_url URL of the Redis instance (shortcut for omitting redis_opts) @@ -38,7 +38,7 @@ class RedisDataStore # @option opts [Boolean] :pool_shutdown_on_close whether calling `close` should shutdown the custom connection pool. # def initialize(opts = {}) - core = LaunchDarkly::Impl::Integrations::Redis::RedisDataStoreCore.new(opts) + core = LaunchDarkly::Impl::Integrations::Redis::RedisFeatureStoreCore.new(opts) @wrapper = LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts) end diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index bd196488..df50cfd0 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -25,7 +25,7 @@ class StreamProcessor def initialize(sdk_key, config, diagnostic_accumulator = nil) @sdk_key = sdk_key @config = config - @data_store = config.data_store + @feature_store = config.feature_store @initialized = Concurrent::AtomicBoolean.new(false) @started = Concurrent::AtomicBoolean.new(false) @stopped = Concurrent::AtomicBoolean.new(false) @@ -85,7 +85,7 @@ def process_message(message) if method == PUT message = JSON.parse(message.data, symbolize_names: true) all_data = Impl::Model.make_all_store_data(message[:data]) - @data_store.init(all_data) + @feature_store.init(all_data) @initialized.make_true @config.logger.info { "[LDClient] Stream initialized" } @ready.set @@ -96,7 +96,7 @@ def process_message(message) if key data = data[:data] Impl::Model.postprocess_item_after_deserializing!(kind, data) - @data_store.upsert(kind, data) + @feature_store.upsert(kind, data) break end end @@ -105,7 +105,7 @@ def process_message(message) for kind in [FEATURES, SEGMENTS] key = key_for_path(kind, data[:path]) if key - @data_store.delete(kind, key, data[:version]) + @feature_store.delete(kind, key, data[:version]) break end end diff --git a/spec/data_store_spec_base.rb b/spec/feature_store_spec_base.rb similarity index 97% rename from spec/data_store_spec_base.rb rename to spec/feature_store_spec_base.rb index a937d93e..2d06f0ff 100644 --- a/spec/data_store_spec_base.rb +++ b/spec/feature_store_spec_base.rb @@ -1,9 +1,9 @@ require "spec_helper" -shared_examples "data_store" do |create_store_method, clear_data_method| +shared_examples "feature_store" do |create_store_method, clear_data_method| # Rather than testing with feature flag or segment data, we'll use this fake data kind - # to make it clear that data stores need to be able to handle arbitrary data. + # to make it clear that feature stores need to be able to handle arbitrary data. 
let(:things_kind) { { namespace: "things" } } let(:key1) { "thing1" } diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index 07f5b481..212d057b 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -96,7 +96,7 @@ def []=(key, value) before do @config = LaunchDarkly::Config.new(logger: $null_log) - @store = @config.data_store + @store = @config.feature_store @tmp_dir = Dir.mktmpdir end diff --git a/spec/in_memory_data_store_spec.rb b/spec/in_memory_data_store_spec.rb deleted file mode 100644 index e43a2ebb..00000000 --- a/spec/in_memory_data_store_spec.rb +++ /dev/null @@ -1,12 +0,0 @@ -require "data_store_spec_base" -require "spec_helper" - -def create_in_memory_store(opts = {}) - LaunchDarkly::InMemoryDataStore.new -end - -describe LaunchDarkly::InMemoryDataStore do - subject { LaunchDarkly::InMemoryDataStore } - - include_examples "data_store", method(:create_in_memory_store) -end diff --git a/spec/in_memory_feature_store_spec.rb b/spec/in_memory_feature_store_spec.rb new file mode 100644 index 00000000..c403fc69 --- /dev/null +++ b/spec/in_memory_feature_store_spec.rb @@ -0,0 +1,12 @@ +require "feature_store_spec_base" +require "spec_helper" + +def create_in_memory_store(opts = {}) + LaunchDarkly::InMemoryFeatureStore.new +end + +describe LaunchDarkly::InMemoryFeatureStore do + subject { LaunchDarkly::InMemoryFeatureStore } + + include_examples "feature_store", method(:create_in_memory_store) +end diff --git a/spec/integrations/consul_data_store_spec.rb b/spec/integrations/consul_feature_store_spec.rb similarity index 63% rename from spec/integrations/consul_data_store_spec.rb rename to spec/integrations/consul_feature_store_spec.rb index 1f254bd7..bad1e736 100644 --- a/spec/integrations/consul_data_store_spec.rb +++ b/spec/integrations/consul_feature_store_spec.rb @@ -1,4 +1,4 @@ -require "data_store_spec_base" +require "feature_store_spec_base" require "diplomat" require "spec_helper" @@ -11,12 +11,12 @@ } def create_consul_store(opts = {}) - LaunchDarkly::Integrations::Consul::new_data_store( + LaunchDarkly::Integrations::Consul::new_feature_store( $consul_base_opts.merge(opts).merge({ expiration: 60 })) end def create_consul_store_uncached(opts = {}) - LaunchDarkly::Integrations::Consul::new_data_store( + LaunchDarkly::Integrations::Consul::new_feature_store( $consul_base_opts.merge(opts).merge({ expiration: 0 })) end @@ -25,16 +25,16 @@ def clear_all_data end -describe "Consul data store" do +describe "Consul feature store" do break if ENV['LD_SKIP_DATABASE_TESTS'] == '1' # These tests will all fail if there isn't a local Consul instance running. 
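As a usage sketch outside the test suite: an application would typically construct one of these Consul-backed stores once and hand it to the client configuration, the same way the helpers above do. The prefix, cache TTL, and SDK key below are placeholders.

store = LaunchDarkly::Integrations::Consul::new_feature_store(
  prefix: "my-app",
  expiration: 30)                                  # seconds of local caching; 0 disables the cache
config = LaunchDarkly::Config.new(feature_store: store)
client = LaunchDarkly::LDClient.new("your-sdk-key", config)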
context "with local cache" do - include_examples "data_store", method(:create_consul_store), method(:clear_all_data) + include_examples "feature_store", method(:create_consul_store), method(:clear_all_data) end context "without local cache" do - include_examples "data_store", method(:create_consul_store_uncached), method(:clear_all_data) + include_examples "feature_store", method(:create_consul_store_uncached), method(:clear_all_data) end end diff --git a/spec/integrations/dynamodb_data_store_spec.rb b/spec/integrations/dynamodb_feature_store_spec.rb similarity index 85% rename from spec/integrations/dynamodb_data_store_spec.rb rename to spec/integrations/dynamodb_feature_store_spec.rb index 7f4e4673..3b95edc8 100644 --- a/spec/integrations/dynamodb_data_store_spec.rb +++ b/spec/integrations/dynamodb_feature_store_spec.rb @@ -1,4 +1,4 @@ -require "data_store_spec_base" +require "feature_store_spec_base" require "aws-sdk-dynamodb" require "spec_helper" @@ -20,12 +20,12 @@ } def create_dynamodb_store(opts = {}) - LaunchDarkly::Integrations::DynamoDB::new_data_store($table_name, + LaunchDarkly::Integrations::DynamoDB::new_feature_store($table_name, $ddb_base_opts.merge(opts).merge({ expiration: 60 })) end def create_dynamodb_store_uncached(opts = {}) - LaunchDarkly::Integrations::DynamoDB::new_data_store($table_name, + LaunchDarkly::Integrations::DynamoDB::new_feature_store($table_name, $ddb_base_opts.merge(opts).merge({ expiration: 0 })) end @@ -86,7 +86,7 @@ def create_test_client end -describe "DynamoDB data store" do +describe "DynamoDB feature store" do break if ENV['LD_SKIP_DATABASE_TESTS'] == '1' # These tests will all fail if there isn't a local DynamoDB instance running. @@ -94,10 +94,10 @@ def create_test_client create_table_if_necessary context "with local cache" do - include_examples "data_store", method(:create_dynamodb_store), method(:clear_all_data) + include_examples "feature_store", method(:create_dynamodb_store), method(:clear_all_data) end context "without local cache" do - include_examples "data_store", method(:create_dynamodb_store_uncached), method(:clear_all_data) + include_examples "feature_store", method(:create_dynamodb_store_uncached), method(:clear_all_data) end end diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 4ea9522f..76e5b0f7 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -7,7 +7,7 @@ let(:offline_client) do subject.new("secret", offline_config) end - let(:null_data) { LaunchDarkly::NullDataSource.new } + let(:null_data) { LaunchDarkly::NullUpdateProcessor.new } let(:logger) { double().as_null_object } let(:config) { LaunchDarkly::Config.new({ send_events: false, data_source: null_data, logger: logger }) } let(:client) do @@ -110,21 +110,21 @@ def event_processor end it "returns the value for an existing feature" do - config.data_store.init({ LaunchDarkly::FEATURES => {} }) - config.data_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) expect(client.variation("key", user, "default")).to eq "value" end it "returns the default value if a feature evaluates to nil" do empty_feature = { key: "key", on: false, offVariation: nil } - config.data_store.init({ LaunchDarkly::FEATURES => {} }) - config.data_store.upsert(LaunchDarkly::FEATURES, empty_feature) + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, empty_feature) 
expect(client.variation("key", user, "default")).to eq "default" end it "queues a feature request event for an existing feature" do - config.data_store.init({ LaunchDarkly::FEATURES => {} }) - config.data_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) expect(event_processor).to receive(:add_event).with(hash_including( kind: "feature", key: "key", @@ -140,8 +140,8 @@ def event_processor end it "queues a feature event for an existing feature when user is nil" do - config.data_store.init({ LaunchDarkly::FEATURES => {} }) - config.data_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) expect(event_processor).to receive(:add_event).with(hash_including( kind: "feature", key: "key", @@ -156,8 +156,8 @@ def event_processor end it "queues a feature event for an existing feature when user key is nil" do - config.data_store.init({ LaunchDarkly::FEATURES => {} }) - config.data_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) bad_user = { name: "Bob" } expect(event_processor).to receive(:add_event).with(hash_including( kind: "feature", @@ -187,8 +187,8 @@ def event_processor trackEvents: true ] } - config.data_store.init({ LaunchDarkly::FEATURES => {} }) - config.data_store.upsert(LaunchDarkly::FEATURES, flag) + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, flag) expect(event_processor).to receive(:add_event).with(hash_including( kind: 'feature', key: 'flag', @@ -212,8 +212,8 @@ def event_processor rules: [], trackEventsFallthrough: true } - config.data_store.init({ LaunchDarkly::FEATURES => {} }) - config.data_store.upsert(LaunchDarkly::FEATURES, flag) + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, flag) expect(event_processor).to receive(:add_event).with(hash_including( kind: 'feature', key: 'flag', @@ -255,8 +255,8 @@ def event_processor end it "returns a value for an existing feature" do - config.data_store.init({ LaunchDarkly::FEATURES => {} }) - config.data_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) result = client.variation_detail("key", user, "default") expected = LaunchDarkly::EvaluationDetail.new("value", 0, LaunchDarkly::EvaluationReason::off) expect(result).to eq expected @@ -264,8 +264,8 @@ def event_processor it "returns the default value if a feature evaluates to nil" do empty_feature = { key: "key", on: false, offVariation: nil } - config.data_store.init({ LaunchDarkly::FEATURES => {} }) - config.data_store.upsert(LaunchDarkly::FEATURES, empty_feature) + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, empty_feature) result = client.variation_detail("key", user, "default") expected = LaunchDarkly::EvaluationDetail.new("default", nil, LaunchDarkly::EvaluationReason::off) expect(result).to eq expected @@ -273,8 +273,8 @@ def event_processor end it "queues a feature request event for an existing feature" do - 
config.data_store.init({ LaunchDarkly::FEATURES => {} }) - config.data_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) expect(event_processor).to receive(:add_event).with(hash_including( kind: "feature", key: "key", @@ -296,28 +296,28 @@ def event_processor let(:flag2) { { key: "key2", offVariation: 0, variations: [ 'value2' ] } } it "returns flag values" do - config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) result = client.all_flags({ key: 'userkey' }) expect(result).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) end it "returns empty map for nil user" do - config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) result = client.all_flags(nil) expect(result).to eq({}) end it "returns empty map for nil user key" do - config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) result = client.all_flags({}) expect(result).to eq({}) end it "returns empty map if offline" do - offline_config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + offline_config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) result = offline_client.all_flags(nil) expect(result).to eq({}) @@ -329,7 +329,7 @@ def event_processor let(:flag2) { { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true, debugEventsUntilDate: 1000 } } it "returns flags state" do - config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) state = client.all_flags_state({ key: 'userkey' }) expect(state.valid?).to be true @@ -362,7 +362,7 @@ def event_processor flag2 = { key: "server-side-2", offVariation: 0, variations: [ 'b' ], clientSide: false } flag3 = { key: "client-side-1", offVariation: 0, variations: [ 'value1' ], clientSide: true } flag4 = { key: "client-side-2", offVariation: 0, variations: [ 'value2' ], clientSide: true } - config.data_store.init({ LaunchDarkly::FEATURES => { + config.feature_store.init({ LaunchDarkly::FEATURES => { flag1[:key] => flag1, flag2[:key] => flag2, flag3[:key] => flag3, flag4[:key] => flag4 }}) @@ -379,7 +379,7 @@ def event_processor flag2 = { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true } flag3 = { key: "key3", version: 300, offVariation: 1, variations: [ 'x', 'value3' ], debugEventsUntilDate: future_time } - config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2, 'key3' => flag3 } }) + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2, 'key3' => flag3 } }) state = client.all_flags_state({ key: 'userkey' }, { details_only_for_tracked_flags: true }) expect(state.valid?).to be true @@ -412,7 +412,7 @@ def event_processor end it "returns empty state for nil user" do - config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' 
=> flag1, 'key2' => flag2 } }) state = client.all_flags_state(nil) expect(state.valid?).to be false @@ -420,7 +420,7 @@ def event_processor end it "returns empty state for nil user key" do - config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) state = client.all_flags_state({}) expect(state.valid?).to be false @@ -428,7 +428,7 @@ def event_processor end it "returns empty state if offline" do - offline_config.data_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + offline_config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) state = offline_client.all_flags_state({ key: 'userkey' }) expect(state.valid?).to be false @@ -512,7 +512,7 @@ def event_processor end end - describe "data store data ordering" do + describe "feature store data ordering" do let(:dependency_ordering_test_data) { { LaunchDarkly::FEATURES => { @@ -529,7 +529,7 @@ def event_processor } } - class FakeDataStore + class FakeFeatureStore attr_reader :received_data def init(all_data) @@ -537,7 +537,7 @@ def init(all_data) end end - class FakeDataSource + class FakeUpdateProcessor def initialize(store, data) @store = store @data = data @@ -558,11 +558,11 @@ def initialized? end end - it "passes data set to data store in correct order on init" do - store = FakeDataStore.new - data_source_factory = lambda { |sdk_key, config| FakeDataSource.new(config.data_store, + it "passes data set to feature store in correct order on init" do + store = FakeFeatureStore.new + data_source_factory = lambda { |sdk_key, config| FakeUpdateProcessor.new(config.feature_store, dependency_ordering_test_data) } - config = LaunchDarkly::Config.new(send_events: false, data_store: store, data_source: data_source_factory) + config = LaunchDarkly::Config.new(send_events: false, feature_store: store, data_source: data_source_factory) client = subject.new("secret", config) data = store.received_data diff --git a/spec/polling_spec.rb b/spec/polling_spec.rb index f91ddc62..ca36364c 100644 --- a/spec/polling_spec.rb +++ b/spec/polling_spec.rb @@ -6,7 +6,7 @@ let(:requestor) { double() } def with_processor(store) - config = LaunchDarkly::Config.new(data_store: store, logger: $null_log) + config = LaunchDarkly::Config.new(feature_store: store, logger: $null_log) processor = subject.new(config, requestor) begin yield processor @@ -29,7 +29,7 @@ def with_processor(store) it 'puts feature data in store' do allow(requestor).to receive(:request_all_data).and_return(all_data) - store = LaunchDarkly::InMemoryDataStore.new + store = LaunchDarkly::InMemoryFeatureStore.new with_processor(store) do |processor| ready = processor.start ready.wait @@ -40,7 +40,7 @@ def with_processor(store) it 'sets initialized to true' do allow(requestor).to receive(:request_all_data).and_return(all_data) - store = LaunchDarkly::InMemoryDataStore.new + store = LaunchDarkly::InMemoryFeatureStore.new with_processor(store) do |processor| ready = processor.start ready.wait @@ -53,7 +53,7 @@ def with_processor(store) describe 'connection error' do it 'does not cause immediate failure, does not set initialized' do allow(requestor).to receive(:request_all_data).and_raise(StandardError.new("test error")) - store = LaunchDarkly::InMemoryDataStore.new + store = LaunchDarkly::InMemoryFeatureStore.new with_processor(store) do |processor| ready = processor.start finished = ready.wait(0.2) @@ -67,7 +67,7 @@ 
def with_processor(store) describe 'HTTP errors' do def verify_unrecoverable_http_error(status) allow(requestor).to receive(:request_all_data).and_raise(LaunchDarkly::UnexpectedResponseError.new(status)) - with_processor(LaunchDarkly::InMemoryDataStore.new) do |processor| + with_processor(LaunchDarkly::InMemoryFeatureStore.new) do |processor| ready = processor.start finished = ready.wait(0.2) expect(finished).to be true @@ -77,7 +77,7 @@ def verify_unrecoverable_http_error(status) def verify_recoverable_http_error(status) allow(requestor).to receive(:request_all_data).and_raise(LaunchDarkly::UnexpectedResponseError.new(status)) - with_processor(LaunchDarkly::InMemoryDataStore.new) do |processor| + with_processor(LaunchDarkly::InMemoryFeatureStore.new) do |processor| ready = processor.start finished = ready.wait(0.2) expect(finished).to be false @@ -108,7 +108,7 @@ def verify_recoverable_http_error(status) describe 'stop' do it 'stops promptly rather than continuing to wait for poll interval' do - with_processor(LaunchDarkly::InMemoryDataStore.new) do |processor| + with_processor(LaunchDarkly::InMemoryFeatureStore.new) do |processor| sleep(1) # somewhat arbitrary, but should ensure that it has started polling start_time = Time.now processor.stop diff --git a/spec/redis_feature_store_spec.rb b/spec/redis_feature_store_spec.rb index 6ca3a4f5..6dd5733e 100644 --- a/spec/redis_feature_store_spec.rb +++ b/spec/redis_feature_store_spec.rb @@ -1,4 +1,4 @@ -require "data_store_spec_base" +require "feature_store_spec_base" require "connection_pool" require "json" require "redis" @@ -13,11 +13,11 @@ } def create_redis_store(opts = {}) - LaunchDarkly::RedisDataStore.new($base_opts.merge(opts).merge({ expiration: 60 })) + LaunchDarkly::RedisFeatureStore.new($base_opts.merge(opts).merge({ expiration: 60 })) end def create_redis_store_uncached(opts = {}) - LaunchDarkly::RedisDataStore.new($base_opts.merge(opts).merge({ expiration: 0 })) + LaunchDarkly::RedisFeatureStore.new($base_opts.merge(opts).merge({ expiration: 0 })) end def clear_all_data @@ -26,19 +26,19 @@ def clear_all_data end -describe LaunchDarkly::RedisDataStore do - subject { LaunchDarkly::RedisDataStore } +describe LaunchDarkly::RedisFeatureStore do + subject { LaunchDarkly::RedisFeatureStore } break if ENV['LD_SKIP_DATABASE_TESTS'] == '1' # These tests will all fail if there isn't a Redis instance running on the default port. 
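Note that this spec still builds its stores through the deprecated RedisFeatureStore constructor. For reference, a minimal wiring sketch with placeholder values, shown both with that constructor and with the integrations factory that the class documentation recommends (assumed here to mirror the Consul and DynamoDB factories used elsewhere in these specs):

# As exercised by this spec (deprecated but still supported):
store = LaunchDarkly::RedisFeatureStore.new(redis_url: "redis://localhost:6379/0", expiration: 60)

# Preferred equivalent via the integrations module, assuming the same option names:
store = LaunchDarkly::Integrations::Redis::new_feature_store(
  redis_url: "redis://localhost:6379/0", expiration: 60)

config = LaunchDarkly::Config.new(feature_store: store)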
context "real Redis with local cache" do - include_examples "data_store", method(:create_redis_store), method(:clear_all_data) + include_examples "feature_store", method(:create_redis_store), method(:clear_all_data) end context "real Redis without local cache" do - include_examples "data_store", method(:create_redis_store_uncached), method(:clear_all_data) + include_examples "feature_store", method(:create_redis_store_uncached), method(:clear_all_data) end def make_concurrent_modifier_test_hook(other_client, flag, start_version, end_version) diff --git a/spec/stream_spec.rb b/spec/stream_spec.rb index de36ae0a..39c678c4 100644 --- a/spec/stream_spec.rb +++ b/spec/stream_spec.rb @@ -15,26 +15,26 @@ it "will accept PUT methods" do processor.send(:process_message, put_message) - expect(config.data_store.get(LaunchDarkly::FEATURES, "asdf")).to eq(key: "asdf") - expect(config.data_store.get(LaunchDarkly::SEGMENTS, "segkey")).to eq(key: "segkey") + expect(config.feature_store.get(LaunchDarkly::FEATURES, "asdf")).to eq(key: "asdf") + expect(config.feature_store.get(LaunchDarkly::SEGMENTS, "segkey")).to eq(key: "segkey") end it "will accept PATCH methods for flags" do processor.send(:process_message, patch_flag_message) - expect(config.data_store.get(LaunchDarkly::FEATURES, "asdf")).to eq(key: "asdf", version: 1) + expect(config.feature_store.get(LaunchDarkly::FEATURES, "asdf")).to eq(key: "asdf", version: 1) end it "will accept PATCH methods for segments" do processor.send(:process_message, patch_seg_message) - expect(config.data_store.get(LaunchDarkly::SEGMENTS, "asdf")).to eq(key: "asdf", version: 1) + expect(config.feature_store.get(LaunchDarkly::SEGMENTS, "asdf")).to eq(key: "asdf", version: 1) end it "will accept DELETE methods for flags" do processor.send(:process_message, patch_flag_message) processor.send(:process_message, delete_flag_message) - expect(config.data_store.get(LaunchDarkly::FEATURES, "key")).to eq(nil) + expect(config.feature_store.get(LaunchDarkly::FEATURES, "key")).to eq(nil) end it "will accept DELETE methods for segments" do processor.send(:process_message, patch_seg_message) processor.send(:process_message, delete_seg_message) - expect(config.data_store.get(LaunchDarkly::SEGMENTS, "key")).to eq(nil) + expect(config.feature_store.get(LaunchDarkly::SEGMENTS, "key")).to eq(nil) end it "will log a warning if the method is not recognized" do expect(processor.instance_variable_get(:@config).logger).to receive :warn From 97d2ca0d76d1ef767dab64317f59e43294e0aec7 Mon Sep 17 00:00:00 2001 From: hroederld Date: Fri, 22 Jan 2021 14:15:58 -0800 Subject: [PATCH 167/182] [ch92483] Use http gem and add socket factory support (#142) --- .circleci/config.yml | 1 + Gemfile.lock | 36 +++++++--- launchdarkly-server-sdk.gemspec | 3 +- lib/ldclient-rb/config.rb | 12 ++++ lib/ldclient-rb/events.rb | 5 +- lib/ldclient-rb/impl/event_sender.rb | 96 +++++++++++++++----------- lib/ldclient-rb/impl/unbounded_pool.rb | 34 +++++++++ lib/ldclient-rb/requestor.rb | 29 ++++---- lib/ldclient-rb/stream.rb | 3 +- lib/ldclient-rb/util.rb | 20 +++--- spec/event_sender_spec.rb | 22 +++++- spec/http_util.rb | 12 +++- spec/ldclient_end_to_end_spec.rb | 34 +++++++++ 13 files changed, 228 insertions(+), 79 deletions(-) create mode 100644 lib/ldclient-rb/impl/unbounded_pool.rb diff --git a/.circleci/config.yml b/.circleci/config.yml index f976071f..ef162444 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -16,6 +16,7 @@ ruby-docker-template: &ruby-docker-template - run: | if [[ $CIRCLE_JOB == 
test-jruby* ]]; then gem install jruby-openssl; # required by bundler, no effect on Ruby MRI + sudo apt-get update -y && sudo apt-get install -y build-essential fi - run: ruby -v - run: gem install bundler -v 1.17.3 diff --git a/Gemfile.lock b/Gemfile.lock index 81bd5ac1..1b634bf4 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -3,13 +3,16 @@ PATH specs: launchdarkly-server-sdk (5.8.2) concurrent-ruby (~> 1.0) + http (~> 4.4.1) json (~> 2.3.1) - ld-eventsource (= 1.0.3) + ld-eventsource (= 2.0.0.pre.beta.1) semantic (~> 1.6) GEM remote: https://rubygems.org/ specs: + addressable (2.7.0) + public_suffix (>= 2.0.2, < 5.0) aws-eventstream (1.1.0) aws-partitions (1.388.0) aws-sdk-core (3.109.1) @@ -22,28 +25,42 @@ GEM aws-sigv4 (~> 1.1) aws-sigv4 (1.2.2) aws-eventstream (~> 1, >= 1.0.2) - concurrent-ruby (1.1.7) + concurrent-ruby (1.1.8) connection_pool (2.2.3) deep_merge (1.2.1) diff-lcs (1.4.4) diplomat (2.4.2) deep_merge (~> 1.0, >= 1.0.1) faraday (>= 0.9, < 1.1.0) + domain_name (0.5.20190701) + unf (>= 0.0.5, < 1.0.0) faraday (0.17.3) multipart-post (>= 1.2, < 3) ffi (1.12.0) - hitimes (1.3.1) - http_tools (0.4.5) + ffi-compiler (1.0.1) + ffi (>= 1.0.0) + rake + http (4.4.1) + addressable (~> 2.3) + http-cookie (~> 1.0) + http-form_data (~> 2.2) + http-parser (~> 1.2.0) + http-cookie (1.0.3) + domain_name (~> 0.5) + http-form_data (2.3.0) + http-parser (1.2.3) + ffi-compiler (>= 1.0, < 2.0) jmespath (1.4.0) json (2.3.1) - ld-eventsource (1.0.3) + ld-eventsource (2.0.0.pre.beta.1) concurrent-ruby (~> 1.0) - http_tools (~> 0.4.5) - socketry (~> 0.5.1) + http (~> 4.4.1) listen (3.2.1) rb-fsevent (~> 0.10, >= 0.10.3) rb-inotify (~> 0.9, >= 0.9.10) multipart-post (2.1.1) + public_suffix (4.0.6) + rake (13.0.3) rb-fsevent (0.10.4) rb-inotify (0.10.1) ffi (~> 1.0) @@ -64,9 +81,10 @@ GEM rspec_junit_formatter (0.3.0) rspec-core (>= 2, < 4, != 2.12.0) semantic (1.6.1) - socketry (0.5.1) - hitimes (~> 1.2) timecop (0.9.2) + unf (0.1.4) + unf_ext + unf_ext (0.0.7.7) PLATFORMS ruby diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index d2d80678..1726f5af 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -38,9 +38,10 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "semantic", "~> 1.6" spec.add_runtime_dependency "concurrent-ruby", "~> 1.0" - spec.add_runtime_dependency "ld-eventsource", "1.0.3" + spec.add_runtime_dependency "ld-eventsource", "2.0.0.pre.beta.1" # lock json to 2.3.x as ruby libraries often remove # support for older ruby versions in minor releases spec.add_runtime_dependency "json", "~> 2.3.1" + spec.add_runtime_dependency "http", "~> 4.4.1" end diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index df0c73b4..edb21924 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -41,6 +41,7 @@ class Config # @option opts [Float] :diagnostic_recording_interval (900) See {#diagnostic_recording_interval}. # @option opts [String] :wrapper_name See {#wrapper_name}. # @option opts [String] :wrapper_version See {#wrapper_version}. + # @option opts [#open] :socket_factory See {#socket_factory}. # def initialize(opts = {}) @base_uri = (opts[:base_uri] || Config.default_base_uri).chomp("/") @@ -71,6 +72,7 @@ def initialize(opts = {}) opts[:diagnostic_recording_interval] : Config.default_diagnostic_recording_interval @wrapper_name = opts[:wrapper_name] @wrapper_version = opts[:wrapper_version] + @socket_factory = opts[:socket_factory] end # @@ -305,6 +307,16 @@ def diagnostic_opt_out? 
# attr_reader :wrapper_version + # + # The factory used to construct sockets for HTTP operations. The factory must + # provide the method `open(uri, timeout)`. The `open` method must return a + # connected stream that implements the `IO` class, such as a `TCPSocket`. + # + # Defaults to nil. + # @return [#open] + # + attr_reader :socket_factory + # # The default LaunchDarkly client configuration. This configuration sets # reasonable defaults for most users. diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index a5352a0b..2e26e1fa 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -238,10 +238,7 @@ def do_shutdown(flush_workers, diagnostic_event_workers) diagnostic_event_workers.shutdown diagnostic_event_workers.wait_for_termination end - begin - @client.finish - rescue - end + @event_sender.stop if @event_sender.respond_to?(:stop) end def synchronize_for_testing(flush_workers, diagnostic_event_workers) diff --git a/lib/ldclient-rb/impl/event_sender.rb b/lib/ldclient-rb/impl/event_sender.rb index f6da0843..442af033 100644 --- a/lib/ldclient-rb/impl/event_sender.rb +++ b/lib/ldclient-rb/impl/event_sender.rb @@ -1,4 +1,7 @@ +require "ldclient-rb/impl/unbounded_pool" + require "securerandom" +require "http" module LaunchDarkly module Impl @@ -9,62 +12,75 @@ class EventSender DEFAULT_RETRY_INTERVAL = 1 def initialize(sdk_key, config, http_client = nil, retry_interval = DEFAULT_RETRY_INTERVAL) - @client = http_client ? http_client : LaunchDarkly::Util.new_http_client(config.events_uri, config) @sdk_key = sdk_key @config = config @events_uri = config.events_uri + "/bulk" @diagnostic_uri = config.events_uri + "/diagnostic" @logger = config.logger @retry_interval = retry_interval + @http_client_pool = UnboundedPool.new( + lambda { LaunchDarkly::Util.new_http_client(@config.events_uri, @config) }, + lambda { |client| client.close }) + end + + def stop + @http_client_pool.dispose_all() end def send_event_data(event_data, description, is_diagnostic) uri = is_diagnostic ? @diagnostic_uri : @events_uri payload_id = is_diagnostic ? nil : SecureRandom.uuid - res = nil - (0..1).each do |attempt| - if attempt > 0 - @logger.warn { "[LDClient] Will retry posting events after #{@retry_interval} second" } - sleep(@retry_interval) - end - begin - @client.start if !@client.started? - @logger.debug { "[LDClient] sending #{description}: #{event_data}" } - req = Net::HTTP::Post.new(uri) - req.content_type = "application/json" - req.body = event_data - Impl::Util.default_http_headers(@sdk_key, @config).each { |k, v| req[k] = v } - if !is_diagnostic - req["X-LaunchDarkly-Event-Schema"] = CURRENT_SCHEMA_VERSION.to_s - req["X-LaunchDarkly-Payload-ID"] = payload_id + begin + http_client = @http_client_pool.acquire() + response = nil + (0..1).each do |attempt| + if attempt > 0 + @logger.warn { "[LDClient] Will retry posting events after #{@retry_interval} second" } + sleep(@retry_interval) end - req["Connection"] = "keep-alive" - res = @client.request(req) - rescue StandardError => exn - @logger.warn { "[LDClient] Error sending events: #{exn.inspect}." } - next - end - status = res.code.to_i - if status >= 200 && status < 300 - res_time = nil - if !res["date"].nil? 
- begin - res_time = Time.httpdate(res["date"]) - rescue ArgumentError + begin + @logger.debug { "[LDClient] sending #{description}: #{event_data}" } + headers = {} + headers["content-type"] = "application/json" + Impl::Util.default_http_headers(@sdk_key, @config).each { |k, v| headers[k] = v } + if !is_diagnostic + headers["X-LaunchDarkly-Event-Schema"] = CURRENT_SCHEMA_VERSION.to_s + headers["X-LaunchDarkly-Payload-ID"] = payload_id end + response = http_client.request("POST", uri, { + headers: headers, + body: event_data + }) + rescue StandardError => exn + @logger.warn { "[LDClient] Error sending events: #{exn.inspect}." } + next + end + status = response.status.code + # must fully read body for persistent connections + body = response.to_s + if status >= 200 && status < 300 + res_time = nil + if !response.headers["date"].nil? + begin + res_time = Time.httpdate(response.headers["date"]) + rescue ArgumentError + end + end + return EventSenderResult.new(true, false, res_time) + end + must_shutdown = !LaunchDarkly::Util.http_error_recoverable?(status) + can_retry = !must_shutdown && attempt == 0 + message = LaunchDarkly::Util.http_error_message(status, "event delivery", can_retry ? "will retry" : "some events were dropped") + @logger.error { "[LDClient] #{message}" } + if must_shutdown + return EventSenderResult.new(false, true, nil) end - return EventSenderResult.new(true, false, res_time) - end - must_shutdown = !LaunchDarkly::Util.http_error_recoverable?(status) - can_retry = !must_shutdown && attempt == 0 - message = LaunchDarkly::Util.http_error_message(status, "event delivery", can_retry ? "will retry" : "some events were dropped") - @logger.error { "[LDClient] #{message}" } - if must_shutdown - return EventSenderResult.new(false, true, nil) end + # used up our retries + return EventSenderResult.new(false, false, nil) + ensure + @http_client_pool.release(http_client) end - # used up our retries - return EventSenderResult.new(false, false, nil) end end end diff --git a/lib/ldclient-rb/impl/unbounded_pool.rb b/lib/ldclient-rb/impl/unbounded_pool.rb new file mode 100644 index 00000000..55bd515f --- /dev/null +++ b/lib/ldclient-rb/impl/unbounded_pool.rb @@ -0,0 +1,34 @@ +module LaunchDarkly + module Impl + # A simple thread safe generic unbounded resource pool abstraction + class UnboundedPool + def initialize(instance_creator, instance_destructor) + @pool = Array.new + @lock = Mutex.new + @instance_creator = instance_creator + @instance_destructor = instance_destructor + end + + def acquire + @lock.synchronize { + if @pool.length == 0 + @instance_creator.call() + else + @pool.pop() + end + } + end + + def release(instance) + @lock.synchronize { @pool.push(instance) } + end + + def dispose_all + @lock.synchronize { + @pool.map { |instance| @instance_destructor.call(instance) } if !@instance_destructor.nil? 
+ @pool.clear() + } + end + end + end +end \ No newline at end of file diff --git a/lib/ldclient-rb/requestor.rb b/lib/ldclient-rb/requestor.rb index 71399bbf..35c5e365 100644 --- a/lib/ldclient-rb/requestor.rb +++ b/lib/ldclient-rb/requestor.rb @@ -3,6 +3,7 @@ require "concurrent/atomics" require "json" require "uri" +require "http" module LaunchDarkly # @private @@ -24,7 +25,7 @@ class Requestor def initialize(sdk_key, config) @sdk_key = sdk_key @config = config - @client = Util.new_http_client(@config.base_uri, @config) + @http_client = LaunchDarkly::Util.new_http_client(config.base_uri, config) @cache = @config.cache_store end @@ -35,7 +36,7 @@ def request_all_data() def stop begin - @client.finish + @http_client.close rescue end end @@ -47,19 +48,21 @@ def request_single_item(kind, path) end def make_request(path) - @client.start if !@client.started? uri = URI(@config.base_uri + path) - req = Net::HTTP::Get.new(uri) - Impl::Util.default_http_headers(@sdk_key, @config).each { |k, v| req[k] = v } - req["Connection"] = "keep-alive" + headers = {} + Impl::Util.default_http_headers(@sdk_key, @config).each { |k, v| headers[k] = v } + headers["Connection"] = "keep-alive" cached = @cache.read(uri) if !cached.nil? - req["If-None-Match"] = cached.etag + headers["If-None-Match"] = cached.etag end - res = @client.request(req) - status = res.code.to_i - @config.logger.debug { "[LDClient] Got response from uri: #{uri}\n\tstatus code: #{status}\n\theaders: #{res.to_hash}\n\tbody: #{res.body}" } - + response = @http_client.request("GET", uri, { + headers: headers + }) + status = response.status.code + @config.logger.debug { "[LDClient] Got response from uri: #{uri}\n\tstatus code: #{status}\n\theaders: #{response.headers}\n\tbody: #{res.to_s}" } + # must fully read body for persistent connections + body = response.to_s if status == 304 && !cached.nil? body = cached.body else @@ -67,8 +70,8 @@ def make_request(path) if status < 200 || status >= 300 raise UnexpectedResponseError.new(status) end - body = fix_encoding(res.body, res["content-type"]) - etag = res["etag"] + body = fix_encoding(body, response.headers["content-type"]) + etag = response.headers["etag"] @cache.write(uri, CacheEntry.new(etag, body)) if !etag.nil? 
end body diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index df50cfd0..64275b39 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -46,7 +46,8 @@ def start opts = { headers: headers, read_timeout: READ_TIMEOUT_SECONDS, - logger: @config.logger + logger: @config.logger, + socket_factory: @config.socket_factory } log_connection_started @es = SSE::Client.new(@config.stream_uri + "/all", **opts) do |conn| diff --git a/lib/ldclient-rb/util.rb b/lib/ldclient-rb/util.rb index e129c279..cfd09d8d 100644 --- a/lib/ldclient-rb/util.rb +++ b/lib/ldclient-rb/util.rb @@ -1,5 +1,5 @@ -require "net/http" require "uri" +require "http" module LaunchDarkly # @private @@ -18,14 +18,18 @@ def self.stringify_attrs(hash, attrs) end ret end - + def self.new_http_client(uri_s, config) - uri = URI(uri_s) - client = Net::HTTP.new(uri.hostname, uri.port) - client.use_ssl = true if uri.scheme == "https" - client.open_timeout = config.connect_timeout - client.read_timeout = config.read_timeout - client + http_client_options = {} + if config.socket_factory + http_client_options["socket_class"] = config.socket_factory + end + return HTTP::Client.new(http_client_options) + .timeout({ + read: config.read_timeout, + connect: config.connect_timeout + }) + .persistent(uri_s) end def self.log_exception(logger, message, exc) diff --git a/spec/event_sender_spec.rb b/spec/event_sender_spec.rb index 0519aebb..5ad3f2f1 100644 --- a/spec/event_sender_spec.rb +++ b/spec/event_sender_spec.rb @@ -39,12 +39,29 @@ def with_sender_and_server "authorization" => [ sdk_key ], "content-type" => [ "application/json" ], "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ], - "x-launchdarkly-event-schema" => [ "3" ] + "x-launchdarkly-event-schema" => [ "3" ], + "connection" => [ "Keep-Alive" ] }) expect(req.header['x-launchdarkly-payload-id']).not_to eq [] end end - + + it "can use a socket factory" do + with_server do |server| + server.setup_ok_response("/bulk", "") + + config = Config.new(events_uri: "http://events.com/bulk", socket_factory: SocketFactoryFromHash.new({"events.com" => server.port}), logger: $null_log) + es = subject.new(sdk_key, config, nil, 0.1) + + result = es.send_event_data(fake_data, "", false) + + expect(result.success).to be true + req = server.await_request + expect(req.body).to eq fake_data + expect(req.host).to eq "events.com" + end + end + it "generates a new payload ID for each payload" do with_sender_and_server do |es, server| server.setup_ok_response("/bulk", "") @@ -78,6 +95,7 @@ def with_sender_and_server "authorization" => [ sdk_key ], "content-type" => [ "application/json" ], "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ], + "connection" => [ "Keep-Alive" ] }) expect(req.header['x-launchdarkly-event-schema']).to eq [] expect(req.header['x-launchdarkly-payload-id']).to eq [] diff --git a/spec/http_util.rb b/spec/http_util.rb index 27032589..1a789772 100644 --- a/spec/http_util.rb +++ b/spec/http_util.rb @@ -3,7 +3,7 @@ require "webrick/https" class StubHTTPServer - attr_reader :requests + attr_reader :requests, :port @@next_port = 50000 @@ -120,3 +120,13 @@ def with_server(server = nil) server.stop end end + +class SocketFactoryFromHash + def initialize(ports = {}) + @ports = ports + end + + def open(uri, timeout) + TCPSocket.new 'localhost', @ports[uri] + end +end \ No newline at end of file diff --git a/spec/ldclient_end_to_end_spec.rb b/spec/ldclient_end_to_end_spec.rb index b93a98b4..a820b608 100644 --- a/spec/ldclient_end_to_end_spec.rb +++ 
b/spec/ldclient_end_to_end_spec.rb @@ -80,6 +80,7 @@ module LaunchDarkly req, body = events_server.await_request_with_body expect(req.header['authorization']).to eq [ SDK_KEY ] + expect(req.header['connection']).to eq [ "Keep-Alive" ] data = JSON.parse(body) expect(data.length).to eq 1 expect(data[0]["kind"]).to eq "identify" @@ -111,6 +112,7 @@ module LaunchDarkly req = req0.path == "/diagnostic" ? req0 : req1 body = req0.path == "/diagnostic" ? body0 : body1 expect(req.header['authorization']).to eq [ SDK_KEY ] + expect(req.header['connection']).to eq [ "Keep-Alive" ] data = JSON.parse(body) expect(data["kind"]).to eq "diagnostic-init" end @@ -118,6 +120,38 @@ module LaunchDarkly end end + it "can use socket factory" do + with_server do |poll_server| + with_server do |events_server| + events_server.setup_ok_response("/bulk", "") + poll_server.setup_ok_response("/sdk/latest-all", '{"flags":{},"segments":{}}', "application/json") + + config = Config.new( + stream: false, + base_uri: "http://polling.com", + events_uri: "http://events.com", + diagnostic_opt_out: true, + logger: NullLogger.new, + socket_factory: SocketFactoryFromHash.new({ + "polling.com" => poll_server.port, + "events.com" => events_server.port + }) + ) + with_client(config) do |client| + client.identify(USER) + client.flush + + req, body = events_server.await_request_with_body + expect(req.header['authorization']).to eq [ SDK_KEY ] + expect(req.header['connection']).to eq [ "Keep-Alive" ] + data = JSON.parse(body) + expect(data.length).to eq 1 + expect(data[0]["kind"]).to eq "identify" + end + end + end + end + # TODO: TLS tests with self-signed cert end end From c7690118dc432df274f99db5ed2b3a4518022a54 Mon Sep 17 00:00:00 2001 From: Elliot <35050275+Apache-HB@users.noreply.github.com> Date: Mon, 25 Jan 2021 17:15:28 -0500 Subject: [PATCH 168/182] update dependencies and add CI for ruby 3 (#141) --- .circleci/config.yml | 18 ++--- CONTRIBUTING.md | 2 +- Gemfile.lock | 73 +++++++++++-------- README.md | 4 +- azure-pipelines.yml | 2 +- launchdarkly-server-sdk.gemspec | 30 ++++---- .../impl/integrations/redis_impl.rb | 4 +- spec/launchdarkly-server-sdk_spec.rb | 2 +- 8 files changed, 70 insertions(+), 65 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index ef162444..6e7dd560 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -4,10 +4,10 @@ workflows: version: 2 test: jobs: - - test-2.4 - test-2.5 - test-2.6 - test-2.7 + - test-3.0 - test-jruby-9.2 ruby-docker-template: &ruby-docker-template @@ -19,7 +19,7 @@ ruby-docker-template: &ruby-docker-template sudo apt-get update -y && sudo apt-get install -y build-essential fi - run: ruby -v - - run: gem install bundler -v 1.17.3 + - run: gem install bundler - run: bundle install - run: mkdir ./rspec - run: bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec @@ -29,13 +29,6 @@ ruby-docker-template: &ruby-docker-template path: ./rspec jobs: - test-2.4: - <<: *ruby-docker-template - docker: - - image: circleci/ruby:2.4 - - image: consul - - image: redis - - image: amazon/dynamodb-local test-2.5: <<: *ruby-docker-template docker: @@ -57,6 +50,13 @@ jobs: - image: consul - image: redis - image: amazon/dynamodb-local + test-3.0: + <<: *ruby-docker-template + docker: + - image: circleci/ruby:3.0 + - image: consul + - image: redis + - image: amazon/dynamodb-local test-jruby-9.2: <<: *ruby-docker-template docker: diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index ac126eec..fb244f5c 100644 --- a/CONTRIBUTING.md +++ 
b/CONTRIBUTING.md @@ -18,7 +18,7 @@ Build instructions ### Prerequisites -This SDK is built with [Bundler](https://bundler.io/). To install Bundler, run `gem install bundler -v 1.17.3`. You might need `sudo` to execute the command successfully. As of this writing, the SDK does not support being built with Bundler 2.0. +This SDK is built with [Bundler](https://bundler.io/). To install Bundler, run `gem install bundler`. You might need `sudo` to execute the command successfully. To install the runtime dependencies: diff --git a/Gemfile.lock b/Gemfile.lock index 1b634bf4..f47034a8 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -2,7 +2,7 @@ PATH remote: . specs: launchdarkly-server-sdk (5.8.2) - concurrent-ruby (~> 1.0) + concurrent-ruby (~> 1.1) http (~> 4.4.1) json (~> 2.3.1) ld-eventsource (= 2.0.0.pre.beta.1) @@ -13,14 +13,16 @@ GEM specs: addressable (2.7.0) public_suffix (>= 2.0.2, < 5.0) + ansi (1.5.0) + ast (2.4.2) aws-eventstream (1.1.0) - aws-partitions (1.388.0) - aws-sdk-core (3.109.1) + aws-partitions (1.418.0) + aws-sdk-core (3.111.2) aws-eventstream (~> 1, >= 1.0.2) aws-partitions (~> 1, >= 1.239.0) aws-sigv4 (~> 1.1) jmespath (~> 1.0) - aws-sdk-dynamodb (1.55.0) + aws-sdk-dynamodb (1.58.0) aws-sdk-core (~> 3, >= 3.109.0) aws-sigv4 (~> 1.1) aws-sigv4 (1.2.2) @@ -34,9 +36,9 @@ GEM faraday (>= 0.9, < 1.1.0) domain_name (0.5.20190701) unf (>= 0.0.5, < 1.0.0) - faraday (0.17.3) + faraday (1.0.1) multipart-post (>= 1.2, < 3) - ffi (1.12.0) + ffi (1.14.2) ffi-compiler (1.0.1) ffi (>= 1.0.0) rake @@ -55,53 +57,60 @@ GEM ld-eventsource (2.0.0.pre.beta.1) concurrent-ruby (~> 1.0) http (~> 4.4.1) - listen (3.2.1) + listen (3.4.1) rb-fsevent (~> 0.10, >= 0.10.3) rb-inotify (~> 0.9, >= 0.9.10) multipart-post (2.1.1) + oga (2.15) + ast + ruby-ll (~> 2.1) public_suffix (4.0.6) rake (13.0.3) rb-fsevent (0.10.4) rb-inotify (0.10.1) ffi (~> 1.0) - redis (3.3.5) - rspec (3.9.0) - rspec-core (~> 3.9.0) - rspec-expectations (~> 3.9.0) - rspec-mocks (~> 3.9.0) - rspec-core (3.9.3) - rspec-support (~> 3.9.3) - rspec-expectations (3.9.3) + redis (4.2.5) + rspec (3.10.0) + rspec-core (~> 3.10.0) + rspec-expectations (~> 3.10.0) + rspec-mocks (~> 3.10.0) + rspec-core (3.10.1) + rspec-support (~> 3.10.0) + rspec-expectations (3.10.1) diff-lcs (>= 1.2.0, < 2.0) - rspec-support (~> 3.9.0) - rspec-mocks (3.9.1) + rspec-support (~> 3.10.0) + rspec-mocks (3.10.1) diff-lcs (>= 1.2.0, < 2.0) - rspec-support (~> 3.9.0) - rspec-support (3.9.4) - rspec_junit_formatter (0.3.0) + rspec-support (~> 3.10.0) + rspec-support (3.10.1) + rspec_junit_formatter (0.4.1) rspec-core (>= 2, < 4, != 2.12.0) + ruby-ll (2.1.2) + ansi + ast semantic (1.6.1) timecop (0.9.2) unf (0.1.4) unf_ext unf_ext (0.0.7.7) + webrick (1.7.0) PLATFORMS ruby DEPENDENCIES - aws-sdk-dynamodb (~> 1.18) - bundler (~> 1.17) - connection_pool (>= 2.1.2) - diplomat (>= 2.0.2) - faraday (~> 0.17) - ffi (<= 1.12) + aws-sdk-dynamodb (~> 1.57) + bundler (~> 2.1) + connection_pool (~> 2.2.3) + diplomat (~> 2.4.2) launchdarkly-server-sdk! 
- listen (~> 3.0) - redis (~> 3.3.5) - rspec (~> 3.2) - rspec_junit_formatter (~> 0.3.0) - timecop (~> 0.9.1) + listen (~> 3.3) + oga (~> 2.2) + redis (~> 4.2) + rspec (~> 3.10) + rspec_junit_formatter (~> 0.4) + timecop (~> 0.9) + webrick (~> 1.7) BUNDLED WITH - 1.17.3 + 2.2.3 diff --git a/README.md b/README.md index 2a61c06c..ef8c0e33 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,7 @@ LaunchDarkly overview Supported Ruby versions ----------------------- -This version of the LaunchDarkly SDK has a minimum Ruby version of 2.3.0, or 9.2.0 for JRuby. +This version of the LaunchDarkly SDK has a minimum Ruby version of 2.5.0, or 9.2.0 for JRuby. Getting started ----------- @@ -55,4 +55,4 @@ About LaunchDarkly * [docs.launchdarkly.com](https://docs.launchdarkly.com/ "LaunchDarkly Documentation") for our documentation and SDK reference guides * [apidocs.launchdarkly.com](https://apidocs.launchdarkly.com/ "LaunchDarkly API Documentation") for our API documentation * [blog.launchdarkly.com](https://blog.launchdarkly.com/ "LaunchDarkly Blog Documentation") for the latest product updates - * [Feature Flagging Guide](https://github.com/launchdarkly/featureflags/ "Feature Flagging Guide") for best practices and strategies \ No newline at end of file + * [Feature Flagging Guide](https://github.com/launchdarkly/featureflags/ "Feature Flagging Guide") for best practices and strategies diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 3d3fd98a..88296f02 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -45,7 +45,7 @@ jobs: workingDirectory: $(System.DefaultWorkingDirectory) script: | ruby -v - gem install bundler -v 1.17.3 + gem install bundler bundle install mkdir rspec bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index 1726f5af..411ba4c1 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -19,25 +19,23 @@ Gem::Specification.new do |spec| spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) } spec.test_files = spec.files.grep(%r{^(test|spec|features)/}) spec.require_paths = ["lib"] - spec.required_ruby_version = ">= 2.4.0" + spec.required_ruby_version = ">= 2.5.0" - spec.add_development_dependency "aws-sdk-dynamodb", "~> 1.18" - spec.add_development_dependency "bundler", "~> 1.17" - spec.add_development_dependency "rspec", "~> 3.2" - spec.add_development_dependency "diplomat", ">= 2.0.2" - spec.add_development_dependency "redis", "~> 3.3.5" - spec.add_development_dependency "connection_pool", ">= 2.1.2" - spec.add_development_dependency "rspec_junit_formatter", "~> 0.3.0" - spec.add_development_dependency "timecop", "~> 0.9.1" - spec.add_development_dependency "listen", "~> 3.0" # see file_data_source.rb - # these are transitive dependencies of listen and consul respectively - # we constrain them here to make sure the ruby 2.2, 2.3, and 2.4 CI - # cases all pass - spec.add_development_dependency "ffi", "<= 1.12" # >1.12 doesnt support ruby 2.2 - spec.add_development_dependency "faraday", "~> 0.17" # >=0.18 doesnt support ruby 2.2 + spec.add_development_dependency "aws-sdk-dynamodb", "~> 1.57" + spec.add_development_dependency "bundler", "~> 2.1" + spec.add_development_dependency "rspec", "~> 3.10" + spec.add_development_dependency "diplomat", "~> 2.4.2" + spec.add_development_dependency "redis", "~> 4.2" + spec.add_development_dependency "connection_pool", "~> 2.2.3" + spec.add_development_dependency 
"rspec_junit_formatter", "~> 0.4" + spec.add_development_dependency "timecop", "~> 0.9" + spec.add_development_dependency "listen", "~> 3.3" # see file_data_source.rb + spec.add_development_dependency "webrick", "~> 1.7" + # required by dynamodb + spec.add_development_dependency "oga", "~> 2.2" spec.add_runtime_dependency "semantic", "~> 1.6" - spec.add_runtime_dependency "concurrent-ruby", "~> 1.0" + spec.add_runtime_dependency "concurrent-ruby", "~> 1.1" spec.add_runtime_dependency "ld-eventsource", "2.0.0.pre.beta.1" # lock json to 2.3.x as ruby libraries often remove diff --git a/lib/ldclient-rb/impl/integrations/redis_impl.rb b/lib/ldclient-rb/impl/integrations/redis_impl.rb index a4cb1365..f948e54a 100644 --- a/lib/ldclient-rb/impl/integrations/redis_impl.rb +++ b/lib/ldclient-rb/impl/integrations/redis_impl.rb @@ -114,9 +114,7 @@ def upsert_internal(kind, new_item) end def initialized_internal? - with_connection do |redis| - redis.respond_to?(:exists?) ? redis.exists?(inited_key) : redis.exists(inited_key) - end + with_connection { |redis| redis.exists?(inited_key) } end def stop diff --git a/spec/launchdarkly-server-sdk_spec.rb b/spec/launchdarkly-server-sdk_spec.rb index b594dac8..6dfa4808 100644 --- a/spec/launchdarkly-server-sdk_spec.rb +++ b/spec/launchdarkly-server-sdk_spec.rb @@ -4,7 +4,7 @@ describe LaunchDarkly do it "can be automatically loaded by Bundler.require" do ldclient_loaded = - Bundler.with_clean_env do + Bundler.with_unbundled_env do Kernel.system("ruby", "./spec/launchdarkly-server-sdk_spec_autoloadtest.rb") end From 91692ca4c33f132b02bd0d991ed4369189f45dec Mon Sep 17 00:00:00 2001 From: Harpo Roeder Date: Tue, 26 Jan 2021 10:57:17 -0800 Subject: [PATCH 169/182] reference eventsource 2.0 in gemspec --- Gemfile.lock | 4 ++-- launchdarkly-server-sdk.gemspec | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index f47034a8..632f9dcf 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -5,7 +5,7 @@ PATH concurrent-ruby (~> 1.1) http (~> 4.4.1) json (~> 2.3.1) - ld-eventsource (= 2.0.0.pre.beta.1) + ld-eventsource (~> 2.0) semantic (~> 1.6) GEM @@ -54,7 +54,7 @@ GEM ffi-compiler (>= 1.0, < 2.0) jmespath (1.4.0) json (2.3.1) - ld-eventsource (2.0.0.pre.beta.1) + ld-eventsource (2.0.0) concurrent-ruby (~> 1.0) http (~> 4.4.1) listen (3.4.1) diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index 411ba4c1..dcf281fe 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -36,7 +36,7 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "semantic", "~> 1.6" spec.add_runtime_dependency "concurrent-ruby", "~> 1.1" - spec.add_runtime_dependency "ld-eventsource", "2.0.0.pre.beta.1" + spec.add_runtime_dependency "ld-eventsource", "~> 2.0" # lock json to 2.3.x as ruby libraries often remove # support for older ruby versions in minor releases From ad0c4f24a8363a9a08df5aa331629c9651c8d5c4 Mon Sep 17 00:00:00 2001 From: Harpo Roeder Date: Tue, 26 Jan 2021 10:58:51 -0800 Subject: [PATCH 170/182] add 5.x releasable branch for releaser --- .ldrelease/config.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.ldrelease/config.yml b/.ldrelease/config.yml index f758fcaf..4f3d0b67 100644 --- a/.ldrelease/config.yml +++ b/.ldrelease/config.yml @@ -2,6 +2,10 @@ repo: public: ruby-server-sdk private: ruby-server-sdk-private +releasableBranches: + - name: master + - name: 5.x + publications: - url: https://rubygems.org/gems/launchdarkly-server-sdk description: RubyGems From 
567f54e1e0e3a1f1cb5289e7fd0d51fcf5f99984 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 26 Jan 2021 13:29:03 -0800 Subject: [PATCH 171/182] use Ruby 2.6.6 in releases --- .ldrelease/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.ldrelease/config.yml b/.ldrelease/config.yml index f148c89e..c1fcca80 100644 --- a/.ldrelease/config.yml +++ b/.ldrelease/config.yml @@ -17,7 +17,7 @@ template: circleci: linux: - image: circleci/ruby:2.6.2-stretch + image: circleci/ruby:2.6.6-buster context: org-global env: LD_SKIP_DATABASE_TESTS: "1" # Don't run Redis/Consul/DynamoDB tests in release; they are run in CI From efec41fdea7d145aebb946ff94b9cc1c667b4665 Mon Sep 17 00:00:00 2001 From: Ben Woskow <48036130+bwoskow-ld@users.noreply.github.com> Date: Wed, 3 Feb 2021 15:07:52 -0800 Subject: [PATCH 172/182] Removed the guides link --- README.md | 1 - 1 file changed, 1 deletion(-) diff --git a/README.md b/README.md index ef8c0e33..2f7b01c6 100644 --- a/README.md +++ b/README.md @@ -55,4 +55,3 @@ About LaunchDarkly * [docs.launchdarkly.com](https://docs.launchdarkly.com/ "LaunchDarkly Documentation") for our documentation and SDK reference guides * [apidocs.launchdarkly.com](https://apidocs.launchdarkly.com/ "LaunchDarkly API Documentation") for our API documentation * [blog.launchdarkly.com](https://blog.launchdarkly.com/ "LaunchDarkly Blog Documentation") for the latest product updates - * [Feature Flagging Guide](https://github.com/launchdarkly/featureflags/ "Feature Flagging Guide") for best practices and strategies From 7601ec78fcd73292af54831d2f7c8588b38c0e12 Mon Sep 17 00:00:00 2001 From: hroederld Date: Thu, 4 Feb 2021 12:29:37 -0800 Subject: [PATCH 173/182] [ch99757] add alias method (#147) --- Gemfile.lock | 2 +- azure-pipelines.yml | 2 +- lib/ldclient-rb/events.rb | 2 ++ lib/ldclient-rb/impl/event_factory.rb | 22 ++++++++++++ lib/ldclient-rb/ldclient.rb | 17 +++++++++ spec/events_spec.rb | 10 ++++++ spec/ldclient_spec.rb | 50 +++++++++++++++++++++++++++ 7 files changed, 103 insertions(+), 2 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index 632f9dcf..ff4cdf63 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,7 +1,7 @@ PATH remote: . specs: - launchdarkly-server-sdk (5.8.2) + launchdarkly-server-sdk (6.0.0) concurrent-ruby (~> 1.1) http (~> 4.4.1) json (~> 2.3.1) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 88296f02..cb66e704 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -45,7 +45,7 @@ jobs: workingDirectory: $(System.DefaultWorkingDirectory) script: | ruby -v - gem install bundler + gem install bundler:2.2.7 bundle install mkdir rspec bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index 2e26e1fa..c59db7d0 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -439,6 +439,7 @@ def make_output_event(event) out[:variation] = event[:variation] if event.has_key?(:variation) out[:version] = event[:version] if event.has_key?(:version) out[:prereqOf] = event[:prereqOf] if event.has_key?(:prereqOf) + out[:contextKind] = event[:contextKind] if event.has_key?(:contextKind) if @inline_users || is_debug out[:user] = process_user(event) else @@ -466,6 +467,7 @@ def make_output_event(event) out[:userKey] = event[:user].nil? ? 
nil : event[:user][:key] end out[:metricValue] = event[:metricValue] if event.has_key?(:metricValue) + out[:contextKind] = event[:contextKind] if event.has_key?(:contextKind) out when "index" { diff --git a/lib/ldclient-rb/impl/event_factory.rb b/lib/ldclient-rb/impl/event_factory.rb index 2e7d2697..256eea98 100644 --- a/lib/ldclient-rb/impl/event_factory.rb +++ b/lib/ldclient-rb/impl/event_factory.rb @@ -28,6 +28,7 @@ def new_eval_event(flag, user, detail, default_value, prereq_of_flag = nil) e[:debugEventsUntilDate] = flag[:debugEventsUntilDate] if flag[:debugEventsUntilDate] e[:prereqOf] = prereq_of_flag[:key] if !prereq_of_flag.nil? e[:reason] = detail.reason if add_experiment_data || @with_reasons + e[:contextKind] = context_to_context_kind(user) if !user.nil? && user[:anonymous] e end @@ -43,6 +44,7 @@ def new_default_event(flag, user, default_value, reason) e[:trackEvents] = true if flag[:trackEvents] e[:debugEventsUntilDate] = flag[:debugEventsUntilDate] if flag[:debugEventsUntilDate] e[:reason] = reason if @with_reasons + e[:contextKind] = context_to_context_kind(user) if !user.nil? && user[:anonymous] e end @@ -55,6 +57,7 @@ def new_unknown_flag_event(key, user, default_value, reason) default: default_value } e[:reason] = reason if @with_reasons + e[:contextKind] = context_to_context_kind(user) if !user.nil? && user[:anonymous] e end @@ -66,6 +69,16 @@ def new_identify_event(user) } end + def new_alias_event(current_context, previous_context) + { + kind: 'alias', + key: current_context[:key], + contextKind: context_to_context_kind(current_context), + previousKey: previous_context[:key], + previousContextKind: context_to_context_kind(previous_context) + } + end + def new_custom_event(event_name, user, data, metric_value) e = { kind: 'custom', @@ -74,11 +87,20 @@ def new_custom_event(event_name, user, data, metric_value) } e[:data] = data if !data.nil? e[:metricValue] = metric_value if !metric_value.nil? + e[:contextKind] = context_to_context_kind(user) if !user.nil? && user[:anonymous] e end private + def context_to_context_kind(user) + if !user.nil? && user[:anonymous] + return "anonymousUser" + else + return "user" + end + end + def is_experiment(flag, reason) return false if !reason case reason[:kind] diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index cfa63351..5d803ef3 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -282,6 +282,23 @@ def track(event_name, user, data = nil, metric_value = nil) @event_processor.add_event(@event_factory_default.new_custom_event(event_name, user, data, metric_value)) end + # + # Associates a new and old user object for analytics purposes via an alias event. + # + # @param current_context [Hash] The current version of a user. + # @param previous_context [Hash] The previous version of a user. + # @return [void] + # + def alias(current_context, previous_context) + if !current_context || current_context[:key].nil? || !previous_context || previous_context[:key].nil? + @config.logger.warn("Alias called with nil user or nil user key!") + return + end + sanitize_user(current_context) + sanitize_user(previous_context) + @event_processor.add_event(@event_factory_default.new_alias_event(current_context, previous_context)) + end + # # Returns all feature flag values for the given user. 
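To make the new alias support concrete, here is a hypothetical call site (user keys invented) linking an anonymous session to a signed-in account; the queued event shape follows new_alias_event and context_to_context_kind above.

anonymous_user = { key: "session-abc123", anonymous: true }
signed_in_user = { key: "user-key-42" }

client.alias(signed_in_user, anonymous_user)
# queues roughly:
# { kind: "alias", key: "user-key-42", contextKind: "user",
#   previousKey: "session-abc123", previousContextKind: "anonymousUser" }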
# diff --git a/spec/events_spec.rb b/spec/events_spec.rb index d7854567..e9a6d6ff 100644 --- a/spec/events_spec.rb +++ b/spec/events_spec.rb @@ -408,6 +408,16 @@ def with_processor_and_sender(config) end end + it "queues alias event" do + with_processor_and_sender(default_config) do |ep, sender| + e = { kind: "alias", key: "a", contextKind: "user", previousKey: "b", previousContextKind: "user" } + ep.add_event(e) + + output = flush_and_get_events(ep, sender) + expect(output).to contain_exactly(e) + end + end + it "treats nil value for custom the same as an empty hash" do with_processor_and_sender(default_config) do |ep, sender| user_with_nil_custom = { key: "userkey", custom: nil } diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 76e5b0f7..f7d215e2 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -25,6 +25,12 @@ } } end + let(:user_anonymous) do + { + key: "anonymous@test.com", + anonymous: true + } + end let(:numeric_key_user) do { key: 33, @@ -155,6 +161,24 @@ def event_processor client.variation("key", nil, "default") end + it "queues a feature event for an existing feature when user is anonymous" do + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + expect(event_processor).to receive(:add_event).with(hash_including( + kind: "feature", + key: "key", + version: 100, + contextKind: "anonymousUser", + user: user_anonymous, + variation: 0, + value: "value", + default: "default", + trackEvents: true, + debugEventsUntilDate: 1000 + )) + client.variation("key", user_anonymous, "default") + end + it "queues a feature event for an existing feature when user key is nil" do config.feature_store.init({ LaunchDarkly::FEATURES => {} }) config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) @@ -455,6 +479,12 @@ def event_processor client.track("custom_event_name", user, nil, 1.5) end + it "includes contextKind with anonymous user" do + expect(event_processor).to receive(:add_event).with(hash_including( + kind: "custom", key: "custom_event_name", user: user_anonymous, metricValue: 2.2, contextKind: "anonymousUser")) + client.track("custom_event_name", user_anonymous, nil, 2.2) + end + it "sanitizes the user in the event" do expect(event_processor).to receive(:add_event).with(hash_including(user: sanitized_numeric_key_user)) client.track("custom_event_name", numeric_key_user, nil) @@ -473,6 +503,26 @@ def event_processor end end + describe '#alias' do + it "queues up an alias event" do + expect(event_processor).to receive(:add_event).with(hash_including( + kind: "alias", key: user[:key], contextKind: "user", previousKey: user_anonymous[:key], previousContextKind: "anonymousUser")) + client.alias(user, user_anonymous) + end + + it "does not send an event, and logs a warning, if user is nil" do + expect(event_processor).not_to receive(:add_event) + expect(logger).to receive(:warn) + client.alias(nil, nil) + end + + it "does not send an event, and logs a warning, if user key is nil" do + expect(event_processor).not_to receive(:add_event) + expect(logger).to receive(:warn) + client.alias(user_without_key, user_without_key) + end + end + describe '#identify' do it "queues up an identify event" do expect(event_processor).to receive(:add_event).with(hash_including(kind: "identify", key: user[:key], user: user)) From 162c596aa8ac6d90ca7327387a3adf776ea299ee Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 5 Feb 2021 18:20:08 -0800 Subject: [PATCH 174/182] don't send event 
for nil user evaluation --- lib/ldclient-rb/ldclient.rb | 13 ++++++------- spec/ldclient_spec.rb | 22 +++++++++++----------- 2 files changed, 17 insertions(+), 18 deletions(-) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 5d803ef3..37d80e9a 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -401,6 +401,12 @@ def evaluate_internal(key, user, default, event_factory) return Evaluator.error_result(EvaluationReason::ERROR_CLIENT_NOT_READY, default) end + unless user + @config.logger.error { "[LDClient] Must specify user" } + detail = Evaluator.error_result(EvaluationReason::ERROR_USER_NOT_SPECIFIED, default) + return detail + end + if !initialized? if @store.initialized? @config.logger.warn { "[LDClient] Client has not finished initializing; using last known values from feature store" } @@ -421,13 +427,6 @@ def evaluate_internal(key, user, default, event_factory) return detail end - unless user - @config.logger.error { "[LDClient] Must specify user" } - detail = Evaluator.error_result(EvaluationReason::ERROR_USER_NOT_SPECIFIED, default) - @event_processor.add_event(event_factory.new_default_event(feature, user, default, detail.reason)) - return detail - end - begin res = @evaluator.evaluate(feature, user, event_factory) if !res.events.nil? diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index f7d215e2..cad4c03c 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -145,19 +145,11 @@ def event_processor client.variation("key", user, "default") end - it "queues a feature event for an existing feature when user is nil" do + it "does not send an event if user is nil" do config.feature_store.init({ LaunchDarkly::FEATURES => {} }) config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) - expect(event_processor).to receive(:add_event).with(hash_including( - kind: "feature", - key: "key", - version: 100, - user: nil, - value: "default", - default: "default", - trackEvents: true, - debugEventsUntilDate: 1000 - )) + expect(event_processor).not_to receive(:add_event) + expect(logger).to receive(:error) client.variation("key", nil, "default") end @@ -313,6 +305,14 @@ def event_processor )) client.variation_detail("key", user, "default") end + + it "does not send an event if user is nil" do + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + expect(event_processor).not_to receive(:add_event) + expect(logger).to receive(:error) + client.variation_detail("key", nil, "default") + end end describe '#all_flags' do From 3b557096a8d4c8fcdbc12cb7f35f942df1f7f352 Mon Sep 17 00:00:00 2001 From: hroederld Date: Fri, 5 Feb 2021 18:22:51 -0800 Subject: [PATCH 175/182] remove lockfile (#148) --- .gitignore | 1 + Gemfile.lock | 116 -------------------------------------------- azure-pipelines.yml | 2 +- 3 files changed, 2 insertions(+), 117 deletions(-) delete mode 100644 Gemfile.lock diff --git a/.gitignore b/.gitignore index 9e998e64..d327dbe8 100644 --- a/.gitignore +++ b/.gitignore @@ -13,3 +13,4 @@ mkmf.log *.gem .DS_Store +Gemfile.lock \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock deleted file mode 100644 index ff4cdf63..00000000 --- a/Gemfile.lock +++ /dev/null @@ -1,116 +0,0 @@ -PATH - remote: . 
- specs: - launchdarkly-server-sdk (6.0.0) - concurrent-ruby (~> 1.1) - http (~> 4.4.1) - json (~> 2.3.1) - ld-eventsource (~> 2.0) - semantic (~> 1.6) - -GEM - remote: https://rubygems.org/ - specs: - addressable (2.7.0) - public_suffix (>= 2.0.2, < 5.0) - ansi (1.5.0) - ast (2.4.2) - aws-eventstream (1.1.0) - aws-partitions (1.418.0) - aws-sdk-core (3.111.2) - aws-eventstream (~> 1, >= 1.0.2) - aws-partitions (~> 1, >= 1.239.0) - aws-sigv4 (~> 1.1) - jmespath (~> 1.0) - aws-sdk-dynamodb (1.58.0) - aws-sdk-core (~> 3, >= 3.109.0) - aws-sigv4 (~> 1.1) - aws-sigv4 (1.2.2) - aws-eventstream (~> 1, >= 1.0.2) - concurrent-ruby (1.1.8) - connection_pool (2.2.3) - deep_merge (1.2.1) - diff-lcs (1.4.4) - diplomat (2.4.2) - deep_merge (~> 1.0, >= 1.0.1) - faraday (>= 0.9, < 1.1.0) - domain_name (0.5.20190701) - unf (>= 0.0.5, < 1.0.0) - faraday (1.0.1) - multipart-post (>= 1.2, < 3) - ffi (1.14.2) - ffi-compiler (1.0.1) - ffi (>= 1.0.0) - rake - http (4.4.1) - addressable (~> 2.3) - http-cookie (~> 1.0) - http-form_data (~> 2.2) - http-parser (~> 1.2.0) - http-cookie (1.0.3) - domain_name (~> 0.5) - http-form_data (2.3.0) - http-parser (1.2.3) - ffi-compiler (>= 1.0, < 2.0) - jmespath (1.4.0) - json (2.3.1) - ld-eventsource (2.0.0) - concurrent-ruby (~> 1.0) - http (~> 4.4.1) - listen (3.4.1) - rb-fsevent (~> 0.10, >= 0.10.3) - rb-inotify (~> 0.9, >= 0.9.10) - multipart-post (2.1.1) - oga (2.15) - ast - ruby-ll (~> 2.1) - public_suffix (4.0.6) - rake (13.0.3) - rb-fsevent (0.10.4) - rb-inotify (0.10.1) - ffi (~> 1.0) - redis (4.2.5) - rspec (3.10.0) - rspec-core (~> 3.10.0) - rspec-expectations (~> 3.10.0) - rspec-mocks (~> 3.10.0) - rspec-core (3.10.1) - rspec-support (~> 3.10.0) - rspec-expectations (3.10.1) - diff-lcs (>= 1.2.0, < 2.0) - rspec-support (~> 3.10.0) - rspec-mocks (3.10.1) - diff-lcs (>= 1.2.0, < 2.0) - rspec-support (~> 3.10.0) - rspec-support (3.10.1) - rspec_junit_formatter (0.4.1) - rspec-core (>= 2, < 4, != 2.12.0) - ruby-ll (2.1.2) - ansi - ast - semantic (1.6.1) - timecop (0.9.2) - unf (0.1.4) - unf_ext - unf_ext (0.0.7.7) - webrick (1.7.0) - -PLATFORMS - ruby - -DEPENDENCIES - aws-sdk-dynamodb (~> 1.57) - bundler (~> 2.1) - connection_pool (~> 2.2.3) - diplomat (~> 2.4.2) - launchdarkly-server-sdk! - listen (~> 3.3) - oga (~> 2.2) - redis (~> 4.2) - rspec (~> 3.10) - rspec_junit_formatter (~> 0.4) - timecop (~> 0.9) - webrick (~> 1.7) - -BUNDLED WITH - 2.2.3 diff --git a/azure-pipelines.yml b/azure-pipelines.yml index cb66e704..88296f02 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -45,7 +45,7 @@ jobs: workingDirectory: $(System.DefaultWorkingDirectory) script: | ruby -v - gem install bundler:2.2.7 + gem install bundler bundle install mkdir rspec bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec From e7f4aaa4d76470855261397dc955fa2dc20f5227 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 5 Feb 2021 18:30:51 -0800 Subject: [PATCH 176/182] rm redundant nil check --- lib/ldclient-rb/events.rb | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index c59db7d0..7b77c4db 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -443,7 +443,7 @@ def make_output_event(event) if @inline_users || is_debug out[:user] = process_user(event) else - out[:userKey] = event[:user].nil? ? nil : event[:user][:key] + out[:userKey] = event[:user][:key] end out[:reason] = event[:reason] if !event[:reason].nil? 
out @@ -451,7 +451,7 @@ def make_output_event(event) { kind: "identify", creationDate: event[:creationDate], - key: event[:user].nil? ? nil : event[:user][:key].to_s, + key: event[:user][:key].to_s, user: process_user(event) } when "custom" @@ -464,7 +464,7 @@ def make_output_event(event) if @inline_users out[:user] = process_user(event) else - out[:userKey] = event[:user].nil? ? nil : event[:user][:key] + out[:userKey] = event[:user][:key] end out[:metricValue] = event[:metricValue] if event.has_key?(:metricValue) out[:contextKind] = event[:contextKind] if event.has_key?(:contextKind) From 0452cd97ac3e78472a4fe0dc3646cff818af673b Mon Sep 17 00:00:00 2001 From: Kerrie Martinez Date: Fri, 14 May 2021 12:32:33 -0700 Subject: [PATCH 177/182] Experiment Allocation Changes (#150) * WIP - from sam's pairing session * starting sdk changes * adding tests and making sure everything works * adding more tests * removing the singleton for fallthrough * Revert "removing the singleton for fallthrough" This reverts commit dff7adbb809ecc63118d0fbff9742a88a039c679. * taking a different approach to keep things immutable * adding tests for untracked * remove unnecessary comment * making sure to return two values in all code paths Co-authored-by: pellyg-ld --- lib/ldclient-rb/evaluation_detail.rb | 45 ++++++-- lib/ldclient-rb/impl/evaluator.rb | 10 +- lib/ldclient-rb/impl/evaluator_bucketing.rb | 29 ++++-- lib/ldclient-rb/impl/event_factory.rb | 6 ++ spec/impl/evaluator_bucketing_spec.rb | 48 +++++++-- spec/impl/evaluator_rule_spec.rb | 32 ++++++ spec/impl/evaluator_spec.rb | 44 ++++++++ spec/impl/event_factory_spec.rb | 108 ++++++++++++++++++++ 8 files changed, 294 insertions(+), 28 deletions(-) create mode 100644 spec/impl/event_factory_spec.rb diff --git a/lib/ldclient-rb/evaluation_detail.rb b/lib/ldclient-rb/evaluation_detail.rb index bccaf133..dc2e6bab 100644 --- a/lib/ldclient-rb/evaluation_detail.rb +++ b/lib/ldclient-rb/evaluation_detail.rb @@ -120,6 +120,9 @@ class EvaluationReason # or deleted. If {#kind} is not {#RULE_MATCH}, this will be `nil`. attr_reader :rule_id + # A boolean or nil value representing if the rule or fallthrough has an experiment rollout. + attr_reader :in_experiment + # The key of the prerequisite flag that did not return the desired variation. If {#kind} is not # {#PREREQUISITE_FAILED}, this will be `nil`. attr_reader :prerequisite_key @@ -136,8 +139,12 @@ def self.off # Returns an instance whose {#kind} is {#FALLTHROUGH}. # @return [EvaluationReason] - def self.fallthrough - @@fallthrough + def self.fallthrough(in_experiment=false) + if in_experiment + @@fallthrough_with_experiment + else + @@fallthrough + end end # Returns an instance whose {#kind} is {#TARGET_MATCH}. @@ -153,10 +160,16 @@ def self.target_match # @param rule_id [String] unique string identifier for the matched rule # @return [EvaluationReason] # @raise [ArgumentError] if `rule_index` is not a number or `rule_id` is not a string - def self.rule_match(rule_index, rule_id) + def self.rule_match(rule_index, rule_id, in_experiment=false) raise ArgumentError.new("rule_index must be a number") if !(rule_index.is_a? Numeric) raise ArgumentError.new("rule_id must be a string") if !rule_id.nil? && !(rule_id.is_a? 
String) # in test data, ID could be nil - new(:RULE_MATCH, rule_index, rule_id, nil, nil) + + if in_experiment + er = new(:RULE_MATCH, rule_index, rule_id, nil, nil, true) + else + er = new(:RULE_MATCH, rule_index, rule_id, nil, nil) + end + er end # Returns an instance whose {#kind} is {#PREREQUISITE_FAILED}. @@ -204,11 +217,17 @@ def to_s def inspect case @kind when :RULE_MATCH - "RULE_MATCH(#{@rule_index},#{@rule_id})" + if @in_experiment + "RULE_MATCH(#{@rule_index},#{@rule_id},#{@in_experiment})" + else + "RULE_MATCH(#{@rule_index},#{@rule_id})" + end when :PREREQUISITE_FAILED "PREREQUISITE_FAILED(#{@prerequisite_key})" when :ERROR "ERROR(#{@error_kind})" + when :FALLTHROUGH + @in_experiment ? "FALLTHROUGH(#{@in_experiment})" : @kind.to_s else @kind.to_s end @@ -225,11 +244,21 @@ def as_json(*) # parameter is unused, but may be passed if we're using the json # as_json and then modify the result. case @kind when :RULE_MATCH - { kind: @kind, ruleIndex: @rule_index, ruleId: @rule_id } + if @in_experiment + { kind: @kind, ruleIndex: @rule_index, ruleId: @rule_id, in_experiment: @in_experiment } + else + { kind: @kind, ruleIndex: @rule_index, ruleId: @rule_id } + end when :PREREQUISITE_FAILED { kind: @kind, prerequisiteKey: @prerequisite_key } when :ERROR { kind: @kind, errorKind: @error_kind } + when :FALLTHROUGH + if @in_experiment + { kind: @kind, in_experiment: @in_experiment } + else + { kind: @kind } + end else { kind: @kind } end @@ -263,7 +292,7 @@ def [](key) private - def initialize(kind, rule_index, rule_id, prerequisite_key, error_kind) + def initialize(kind, rule_index, rule_id, prerequisite_key, error_kind, in_experiment=nil) @kind = kind.to_sym @rule_index = rule_index @rule_id = rule_id @@ -271,6 +300,7 @@ def initialize(kind, rule_index, rule_id, prerequisite_key, error_kind) @prerequisite_key = prerequisite_key @prerequisite_key.freeze if !prerequisite_key.nil? @error_kind = error_kind + @in_experiment = in_experiment end private_class_method :new @@ -279,6 +309,7 @@ def self.make_error(error_kind) new(:ERROR, nil, nil, nil, error_kind) end + @@fallthrough_with_experiment = new(:FALLTHROUGH, nil, nil, nil, nil, true) @@fallthrough = new(:FALLTHROUGH, nil, nil, nil, nil) @@off = new(:OFF, nil, nil, nil, nil) @@target_match = new(:TARGET_MATCH, nil, nil, nil, nil) diff --git a/lib/ldclient-rb/impl/evaluator.rb b/lib/ldclient-rb/impl/evaluator.rb index d441eb42..00898cd9 100644 --- a/lib/ldclient-rb/impl/evaluator.rb +++ b/lib/ldclient-rb/impl/evaluator.rb @@ -190,7 +190,7 @@ def segment_rule_match_user(rule, user, segment_key, salt) return true if !rule[:weight] # All of the clauses are met. See if the user buckets in - bucket = EvaluatorBucketing.bucket_user(user, segment_key, rule[:bucketBy].nil? ? "key" : rule[:bucketBy], salt) + bucket = EvaluatorBucketing.bucket_user(user, segment_key, rule[:bucketBy].nil? ? 
"key" : rule[:bucketBy], salt, nil) weight = rule[:weight].to_f / 100000.0 return bucket < weight end @@ -213,7 +213,13 @@ def get_off_value(flag, reason) end def get_value_for_variation_or_rollout(flag, vr, user, reason) - index = EvaluatorBucketing.variation_index_for_user(flag, vr, user) + index, in_experiment = EvaluatorBucketing.variation_index_for_user(flag, vr, user) + #if in experiment is true, set reason to a different reason instance/singleton with in_experiment set + if in_experiment && reason.kind == :FALLTHROUGH + reason = EvaluationReason::fallthrough(in_experiment) + elsif in_experiment && reason.kind == :RULE_MATCH + reason = EvaluationReason::rule_match(reason.rule_index, reason.rule_id, in_experiment) + end if index.nil? @logger.error("[LDClient] Data inconsistency in feature flag \"#{flag[:key]}\": variation/rollout object with no variation or rollout") return Evaluator.error_result(EvaluationReason::ERROR_MALFORMED_FLAG) diff --git a/lib/ldclient-rb/impl/evaluator_bucketing.rb b/lib/ldclient-rb/impl/evaluator_bucketing.rb index b3d14ed1..f2f2075f 100644 --- a/lib/ldclient-rb/impl/evaluator_bucketing.rb +++ b/lib/ldclient-rb/impl/evaluator_bucketing.rb @@ -10,20 +10,29 @@ module EvaluatorBucketing # @param user [Object] the user properties # @return [Number] the variation index, or nil if there is an error def self.variation_index_for_user(flag, rule, user) + in_experiment = nil + variation = rule[:variation] - return variation if !variation.nil? # fixed variation + return variation, in_experiment if !variation.nil? # fixed variation rollout = rule[:rollout] - return nil if rollout.nil? + return nil, in_experiment if rollout.nil? variations = rollout[:variations] if !variations.nil? && variations.length > 0 # percentage rollout rollout = rule[:rollout] bucket_by = rollout[:bucketBy].nil? ? "key" : rollout[:bucketBy] - bucket = bucket_user(user, flag[:key], bucket_by, flag[:salt]) + + seed = rollout[:seed] + bucket = bucket_user(user, flag[:key], bucket_by, flag[:salt], seed) # may not be present sum = 0; variations.each do |variate| + if rule[:rollout][:kind] == "experiment" && !variate[:untracked] + in_experiment = true + end + sum += variate[:weight].to_f / 100000.0 + if bucket < sum - return variate[:variation] + return variate[:variation], in_experiment end end # The user's bucket value was greater than or equal to the end of the last bucket. This could happen due @@ -31,9 +40,9 @@ def self.variation_index_for_user(flag, rule, user) # data could contain buckets that don't actually add up to 100000. Rather than returning an error in # this case (or changing the scaling, which would potentially change the results for *all* users), we # will simply put the user in the last bucket. - variations[-1][:variation] + [ variations[-1][:variation], in_experiment ] else # the rule isn't well-formed - nil + [ nil, in_experiment ] end end @@ -44,7 +53,7 @@ def self.variation_index_for_user(flag, rule, user) # @param bucket_by [String|Symbol] the name of the user attribute to be used for bucketing # @param salt [String] the feature flag's or segment's salt value # @return [Number] the bucket value, from 0 inclusive to 1 exclusive - def self.bucket_user(user, key, bucket_by, salt) + def self.bucket_user(user, key, bucket_by, salt, seed) return nil unless user[:key] id_hash = bucketable_string_value(EvaluatorOperators.user_value(user, bucket_by)) @@ -56,7 +65,11 @@ def self.bucket_user(user, key, bucket_by, salt) id_hash += "." 
+ user[:secondary].to_s end - hash_key = "%s.%s.%s" % [key, salt, id_hash] + if seed + hash_key = "%d.%s" % [seed, id_hash] + else + hash_key = "%s.%s.%s" % [key, salt, id_hash] + end hash_val = (Digest::SHA1.hexdigest(hash_key))[0..14] hash_val.to_i(16) / Float(0xFFFFFFFFFFFFFFF) diff --git a/lib/ldclient-rb/impl/event_factory.rb b/lib/ldclient-rb/impl/event_factory.rb index 256eea98..691339d7 100644 --- a/lib/ldclient-rb/impl/event_factory.rb +++ b/lib/ldclient-rb/impl/event_factory.rb @@ -103,6 +103,11 @@ def context_to_context_kind(user) def is_experiment(flag, reason) return false if !reason + + if reason.in_experiment + return true + end + case reason[:kind] when 'RULE_MATCH' index = reason[:ruleIndex] @@ -115,6 +120,7 @@ def is_experiment(flag, reason) end false end + end end end diff --git a/spec/impl/evaluator_bucketing_spec.rb b/spec/impl/evaluator_bucketing_spec.rb index a9c79b5c..35775838 100644 --- a/spec/impl/evaluator_bucketing_spec.rb +++ b/spec/impl/evaluator_bucketing_spec.rb @@ -4,17 +4,43 @@ subject { LaunchDarkly::Impl::EvaluatorBucketing } describe "bucket_user" do + describe "seed exists" do + let(:seed) { 61 } + it "gets the expected bucket values for seed" do + user = { key: "userKeyA" } + bucket = subject.bucket_user(user, "hashKey", "key", "saltyA", seed) + expect(bucket).to be_within(0.0000001).of(0.09801207); + + user = { key: "userKeyB" } + bucket = subject.bucket_user(user, "hashKey", "key", "saltyA", seed) + expect(bucket).to be_within(0.0000001).of(0.14483777); + + user = { key: "userKeyC" } + bucket = subject.bucket_user(user, "hashKey", "key", "saltyA", seed) + expect(bucket).to be_within(0.0000001).of(0.9242641); + end + + it "should return the same bucket if the seed and user is the same" do + user = { key: "userKeyA" } + bucket1 = subject.bucket_user(user, "hashKey", "bucket_by", "saltyA", seed) + bucket2 = subject.bucket_user(user, "hashKey1", "bucket_by", "saltyB", seed) + bucket3 = subject.bucket_user(user, "hashKey2", "bucket_by", "saltyC", seed) + expect(bucket1).to eq(bucket2) + expect(bucket2).to eq(bucket3) + end + end + it "gets expected bucket values for specific keys" do user = { key: "userKeyA" } - bucket = subject.bucket_user(user, "hashKey", "key", "saltyA") + bucket = subject.bucket_user(user, "hashKey", "key", "saltyA", nil) expect(bucket).to be_within(0.0000001).of(0.42157587); user = { key: "userKeyB" } - bucket = subject.bucket_user(user, "hashKey", "key", "saltyA") + bucket = subject.bucket_user(user, "hashKey", "key", "saltyA", nil) expect(bucket).to be_within(0.0000001).of(0.6708485); user = { key: "userKeyC" } - bucket = subject.bucket_user(user, "hashKey", "key", "saltyA") + bucket = subject.bucket_user(user, "hashKey", "key", "saltyA", nil) expect(bucket).to be_within(0.0000001).of(0.10343106); end @@ -26,8 +52,8 @@ intAttr: 33333 } } - stringResult = subject.bucket_user(user, "hashKey", "stringAttr", "saltyA") - intResult = subject.bucket_user(user, "hashKey", "intAttr", "saltyA") + stringResult = subject.bucket_user(user, "hashKey", "stringAttr", "saltyA", nil) + intResult = subject.bucket_user(user, "hashKey", "intAttr", "saltyA", nil) expect(intResult).to be_within(0.0000001).of(0.54771423) expect(intResult).to eq(stringResult) @@ -40,7 +66,7 @@ floatAttr: 33.5 } } - result = subject.bucket_user(user, "hashKey", "floatAttr", "saltyA") + result = subject.bucket_user(user, "hashKey", "floatAttr", "saltyA", nil) expect(result).to eq(0.0) end @@ -52,7 +78,7 @@ boolAttr: true } } - result = subject.bucket_user(user, "hashKey", 
"boolAttr", "saltyA") + result = subject.bucket_user(user, "hashKey", "boolAttr", "saltyA", nil) expect(result).to eq(0.0) end end @@ -65,7 +91,7 @@ # First verify that with our test inputs, the bucket value will be greater than zero and less than 100000, # so we can construct a rollout whose second bucket just barely contains that value - bucket_value = (subject.bucket_user(user, flag_key, "key", salt) * 100000).truncate() + bucket_value = (subject.bucket_user(user, flag_key, "key", salt, nil) * 100000).truncate() expect(bucket_value).to be > 0 expect(bucket_value).to be < 100000 @@ -83,7 +109,7 @@ } flag = { key: flag_key, salt: salt } - result_variation = subject.variation_index_for_user(flag, rule, user) + result_variation, _ = subject.variation_index_for_user(flag, rule, user) expect(result_variation).to be matched_variation end @@ -92,7 +118,7 @@ flag_key = "flagkey" salt = "salt" - bucket_value = (subject.bucket_user(user, flag_key, "key", salt) * 100000).truncate() + bucket_value = (subject.bucket_user(user, flag_key, "key", salt, nil) * 100000).truncate() # We'll construct a list of variations that stops right at the target bucket value rule = { @@ -104,7 +130,7 @@ } flag = { key: flag_key, salt: salt } - result_variation = subject.variation_index_for_user(flag, rule, user) + result_variation, _ = subject.variation_index_for_user(flag, rule, user) expect(result_variation).to be 0 end end diff --git a/spec/impl/evaluator_rule_spec.rb b/spec/impl/evaluator_rule_spec.rb index a1ae5d66..8f6c207f 100644 --- a/spec/impl/evaluator_rule_spec.rb +++ b/spec/impl/evaluator_rule_spec.rb @@ -91,6 +91,38 @@ module Impl result = basic_evaluator.evaluate(flag, user, factory) expect(result.detail.reason).to eq(EvaluationReason::rule_match(0, 'ruleid')) end + + describe "experiment rollout behavior" do + it "sets the in_experiment value if rollout kind is experiment " do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], + rollout: { kind: 'experiment', variations: [ { weight: 100000, variation: 1, untracked: false } ] } } + flag = boolean_flag_with_rules([rule]) + user = { key: "userkey", secondary: 999 } + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail.reason.to_json).to include('"in_experiment":true') + expect(result.detail.reason.in_experiment).to eq(true) + end + + it "does not set the in_experiment value if rollout kind is not experiment " do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], + rollout: { kind: 'rollout', variations: [ { weight: 100000, variation: 1, untracked: false } ] } } + flag = boolean_flag_with_rules([rule]) + user = { key: "userkey", secondary: 999 } + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail.reason.to_json).to_not include('"in_experiment":true') + expect(result.detail.reason.in_experiment).to eq(nil) + end + + it "does not set the in_experiment value if rollout kind is experiment and untracked is true" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], + rollout: { kind: 'experiment', variations: [ { weight: 100000, variation: 1, untracked: true } ] } } + flag = boolean_flag_with_rules([rule]) + user = { key: "userkey", secondary: 999 } + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail.reason.to_json).to_not include('"in_experiment":true') + expect(result.detail.reason.in_experiment).to eq(nil) + end + end end end end diff --git 
a/spec/impl/evaluator_spec.rb b/spec/impl/evaluator_spec.rb index dcf8928b..4b0f3741 100644 --- a/spec/impl/evaluator_spec.rb +++ b/spec/impl/evaluator_spec.rb @@ -299,6 +299,50 @@ module Impl expect(result.detail).to eq(detail) expect(result.events).to eq(nil) end + + describe "experiment rollout behavior" do + it "sets the in_experiment value if rollout kind is experiment and untracked false" do + flag = { + key: 'feature', + on: true, + fallthrough: { rollout: { kind: 'experiment', variations: [ { weight: 100000, variation: 1, untracked: false } ] } }, + offVariation: 1, + variations: ['a', 'b', 'c'] + } + user = { key: 'userkey' } + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail.reason.to_json).to include('"in_experiment":true') + expect(result.detail.reason.in_experiment).to eq(true) + end + + it "does not set the in_experiment value if rollout kind is not experiment" do + flag = { + key: 'feature', + on: true, + fallthrough: { rollout: { kind: 'rollout', variations: [ { weight: 100000, variation: 1, untracked: false } ] } }, + offVariation: 1, + variations: ['a', 'b', 'c'] + } + user = { key: 'userkey' } + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail.reason.to_json).to_not include('"in_experiment":true') + expect(result.detail.reason.in_experiment).to eq(nil) + end + + it "does not set the in_experiment value if rollout kind is experiment and untracked is true" do + flag = { + key: 'feature', + on: true, + fallthrough: { rollout: { kind: 'experiment', variations: [ { weight: 100000, variation: 1, untracked: true } ] } }, + offVariation: 1, + variations: ['a', 'b', 'c'] + } + user = { key: 'userkey' } + result = basic_evaluator.evaluate(flag, user, factory) + expect(result.detail.reason.to_json).to_not include('"in_experiment":true') + expect(result.detail.reason.in_experiment).to eq(nil) + end + end end end end diff --git a/spec/impl/event_factory_spec.rb b/spec/impl/event_factory_spec.rb new file mode 100644 index 00000000..9da19de0 --- /dev/null +++ b/spec/impl/event_factory_spec.rb @@ -0,0 +1,108 @@ +require "spec_helper" + +describe LaunchDarkly::Impl::EventFactory do + subject { LaunchDarkly::Impl::EventFactory } + + describe "#new_eval_event" do + let(:event_factory_without_reason) { subject.new(false) } + let(:user) { { 'key': 'userA' } } + let(:rule_with_experiment_rollout) { + { id: 'ruleid', + clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], + trackEvents: false, + rollout: { kind: 'experiment', salt: '', variations: [ { weight: 100000, variation: 0, untracked: false } ] } + } + } + + let(:rule_with_rollout) { + { id: 'ruleid', + trackEvents: false, + clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], + rollout: { salt: '', variations: [ { weight: 100000, variation: 0, untracked: false } ] } + } + } + + let(:fallthrough_with_rollout) { + { rollout: { kind: 'rollout', salt: '', variations: [ { weight: 100000, variation: 0, untracked: false } ], trackEventsFallthrough: false } } + } + + let(:rule_reason) { LaunchDarkly::EvaluationReason::rule_match(0, 'ruleid') } + let(:rule_reason_with_experiment) { LaunchDarkly::EvaluationReason::rule_match(0, 'ruleid', true) } + let(:fallthrough_reason) { LaunchDarkly::EvaluationReason::fallthrough } + let(:fallthrough_reason_with_experiment) { LaunchDarkly::EvaluationReason::fallthrough(true) } + + context "in_experiment is true" do + it "sets the reason and trackevents: true for rules" do + flag = createFlag('rule', rule_with_experiment_rollout) + 
detail = LaunchDarkly::EvaluationDetail.new(true, 0, rule_reason_with_experiment) + r = subject.new(false).new_eval_event(flag, user, detail, nil, nil) + expect(r[:trackEvents]).to eql(true) + expect(r[:reason].to_s).to eql("RULE_MATCH(0,ruleid,true)") + end + + it "sets the reason and trackevents: true for the fallthrough" do + fallthrough_with_rollout[:kind] = 'experiment' + flag = createFlag('fallthrough', fallthrough_with_rollout) + detail = LaunchDarkly::EvaluationDetail.new(true, 0, fallthrough_reason_with_experiment) + r = subject.new(false).new_eval_event(flag, user, detail, nil, nil) + expect(r[:trackEvents]).to eql(true) + expect(r[:reason].to_s).to eql("FALLTHROUGH(true)") + end + end + + context "in_experiment is false" do + it "sets the reason & trackEvents: true if rule has trackEvents set to true" do + rule_with_rollout[:trackEvents] = true + flag = createFlag('rule', rule_with_rollout) + detail = LaunchDarkly::EvaluationDetail.new(true, 0, rule_reason) + r = subject.new(false).new_eval_event(flag, user, detail, nil, nil) + expect(r[:trackEvents]).to eql(true) + expect(r[:reason].to_s).to eql("RULE_MATCH(0,ruleid)") + end + + it "sets the reason & trackEvents: true if fallthrough has trackEventsFallthrough set to true" do + flag = createFlag('fallthrough', fallthrough_with_rollout) + flag[:trackEventsFallthrough] = true + detail = LaunchDarkly::EvaluationDetail.new(true, 0, fallthrough_reason) + r = subject.new(false).new_eval_event(flag, user, detail, nil, nil) + expect(r[:trackEvents]).to eql(true) + expect(r[:reason].to_s).to eql("FALLTHROUGH") + end + + it "doesn't set the reason & trackEvents if rule has trackEvents set to false" do + flag = createFlag('rule', rule_with_rollout) + detail = LaunchDarkly::EvaluationDetail.new(true, 0, rule_reason) + r = subject.new(false).new_eval_event(flag, user, detail, nil, nil) + expect(r[:trackEvents]).to be_nil + expect(r[:reason]).to be_nil + end + + it "doesn't set the reason & trackEvents if fallthrough has trackEventsFallthrough set to false" do + flag = createFlag('fallthrough', fallthrough_with_rollout) + detail = LaunchDarkly::EvaluationDetail.new(true, 0, fallthrough_reason) + r = subject.new(false).new_eval_event(flag, user, detail, nil, nil) + expect(r[:trackEvents]).to be_nil + expect(r[:reason]).to be_nil + end + + it "sets trackEvents true and doesn't set the reason if flag[:trackEvents] = true" do + flag = createFlag('fallthrough', fallthrough_with_rollout) + flag[:trackEvents] = true + detail = LaunchDarkly::EvaluationDetail.new(true, 0, fallthrough_reason) + r = subject.new(false).new_eval_event(flag, user, detail, nil, nil) + expect(r[:trackEvents]).to eql(true) + expect(r[:reason]).to be_nil + end + end + end + + def createFlag(kind, rule) + if kind == 'rule' + { key: 'feature', on: true, rules: [rule], fallthrough: { variation: 0 }, variations: [ false, true ] } + elsif kind == 'fallthrough' + { key: 'feature', on: true, fallthrough: rule, variations: [ false, true ] } + else + { key: 'feature', on: true, fallthrough: { variation: 0 }, variations: [ false, true ] } + end + end +end \ No newline at end of file From 4418ccef9cfca5d91380466838a47ff33e6501c4 Mon Sep 17 00:00:00 2001 From: Sam Stokes Date: Tue, 15 Jun 2021 10:45:13 -0700 Subject: [PATCH 178/182] Use camelCase for JSON property names (#151) The in_experiment attribute was added to reasons as part of #150 but it doesn't appear to be received in events. 
I think that's because it's sending it in JSON as "in_experiment" rather than "inExperiment" as we expect to parse it. --- lib/ldclient-rb/evaluation_detail.rb | 4 ++-- spec/impl/evaluator_rule_spec.rb | 6 +++--- spec/impl/evaluator_spec.rb | 6 +++--- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/lib/ldclient-rb/evaluation_detail.rb b/lib/ldclient-rb/evaluation_detail.rb index dc2e6bab..4eae67bc 100644 --- a/lib/ldclient-rb/evaluation_detail.rb +++ b/lib/ldclient-rb/evaluation_detail.rb @@ -245,7 +245,7 @@ def as_json(*) # parameter is unused, but may be passed if we're using the json case @kind when :RULE_MATCH if @in_experiment - { kind: @kind, ruleIndex: @rule_index, ruleId: @rule_id, in_experiment: @in_experiment } + { kind: @kind, ruleIndex: @rule_index, ruleId: @rule_id, inExperiment: @in_experiment } else { kind: @kind, ruleIndex: @rule_index, ruleId: @rule_id } end @@ -255,7 +255,7 @@ def as_json(*) # parameter is unused, but may be passed if we're using the json { kind: @kind, errorKind: @error_kind } when :FALLTHROUGH if @in_experiment - { kind: @kind, in_experiment: @in_experiment } + { kind: @kind, inExperiment: @in_experiment } else { kind: @kind } end diff --git a/spec/impl/evaluator_rule_spec.rb b/spec/impl/evaluator_rule_spec.rb index 8f6c207f..7299decb 100644 --- a/spec/impl/evaluator_rule_spec.rb +++ b/spec/impl/evaluator_rule_spec.rb @@ -99,7 +99,7 @@ module Impl flag = boolean_flag_with_rules([rule]) user = { key: "userkey", secondary: 999 } result = basic_evaluator.evaluate(flag, user, factory) - expect(result.detail.reason.to_json).to include('"in_experiment":true') + expect(result.detail.reason.to_json).to include('"inExperiment":true') expect(result.detail.reason.in_experiment).to eq(true) end @@ -109,7 +109,7 @@ module Impl flag = boolean_flag_with_rules([rule]) user = { key: "userkey", secondary: 999 } result = basic_evaluator.evaluate(flag, user, factory) - expect(result.detail.reason.to_json).to_not include('"in_experiment":true') + expect(result.detail.reason.to_json).to_not include('"inExperiment":true') expect(result.detail.reason.in_experiment).to eq(nil) end @@ -119,7 +119,7 @@ module Impl flag = boolean_flag_with_rules([rule]) user = { key: "userkey", secondary: 999 } result = basic_evaluator.evaluate(flag, user, factory) - expect(result.detail.reason.to_json).to_not include('"in_experiment":true') + expect(result.detail.reason.to_json).to_not include('"inExperiment":true') expect(result.detail.reason.in_experiment).to eq(nil) end end diff --git a/spec/impl/evaluator_spec.rb b/spec/impl/evaluator_spec.rb index 4b0f3741..543b524d 100644 --- a/spec/impl/evaluator_spec.rb +++ b/spec/impl/evaluator_spec.rb @@ -311,7 +311,7 @@ module Impl } user = { key: 'userkey' } result = basic_evaluator.evaluate(flag, user, factory) - expect(result.detail.reason.to_json).to include('"in_experiment":true') + expect(result.detail.reason.to_json).to include('"inExperiment":true') expect(result.detail.reason.in_experiment).to eq(true) end @@ -325,7 +325,7 @@ module Impl } user = { key: 'userkey' } result = basic_evaluator.evaluate(flag, user, factory) - expect(result.detail.reason.to_json).to_not include('"in_experiment":true') + expect(result.detail.reason.to_json).to_not include('"inExperiment":true') expect(result.detail.reason.in_experiment).to eq(nil) end @@ -339,7 +339,7 @@ module Impl } user = { key: 'userkey' } result = basic_evaluator.evaluate(flag, user, factory) - expect(result.detail.reason.to_json).to_not include('"in_experiment":true') + 
expect(result.detail.reason.to_json).to_not include('"inExperiment":true') expect(result.detail.reason.in_experiment).to eq(nil) end end From 4c2fd31e1915192ddd318b010b7534486ef1b8b4 Mon Sep 17 00:00:00 2001 From: Kerrie Martinez Date: Wed, 16 Jun 2021 16:59:34 -0700 Subject: [PATCH 179/182] fixing ruby logic causing ih failures (#152) * fixing ruby logic * adding missing spec * Apply suggestions from code review Co-authored-by: Sam Stokes * pr tweaks * making spec language consistent Co-authored-by: Sam Stokes --- lib/ldclient-rb/impl/evaluator_bucketing.rb | 18 +-- spec/impl/evaluator_bucketing_spec.rb | 127 ++++++++++++++++---- 2 files changed, 112 insertions(+), 33 deletions(-) diff --git a/lib/ldclient-rb/impl/evaluator_bucketing.rb b/lib/ldclient-rb/impl/evaluator_bucketing.rb index f2f2075f..11842f74 100644 --- a/lib/ldclient-rb/impl/evaluator_bucketing.rb +++ b/lib/ldclient-rb/impl/evaluator_bucketing.rb @@ -10,29 +10,26 @@ module EvaluatorBucketing # @param user [Object] the user properties # @return [Number] the variation index, or nil if there is an error def self.variation_index_for_user(flag, rule, user) - in_experiment = nil variation = rule[:variation] - return variation, in_experiment if !variation.nil? # fixed variation + return variation, false if !variation.nil? # fixed variation rollout = rule[:rollout] - return nil, in_experiment if rollout.nil? + return nil, false if rollout.nil? variations = rollout[:variations] if !variations.nil? && variations.length > 0 # percentage rollout - rollout = rule[:rollout] bucket_by = rollout[:bucketBy].nil? ? "key" : rollout[:bucketBy] seed = rollout[:seed] bucket = bucket_user(user, flag[:key], bucket_by, flag[:salt], seed) # may not be present sum = 0; variations.each do |variate| - if rule[:rollout][:kind] == "experiment" && !variate[:untracked] + if rollout[:kind] == "experiment" && !variate[:untracked] in_experiment = true end sum += variate[:weight].to_f / 100000.0 - if bucket < sum - return variate[:variation], in_experiment + return variate[:variation], !!in_experiment end end # The user's bucket value was greater than or equal to the end of the last bucket. This could happen due @@ -40,9 +37,12 @@ def self.variation_index_for_user(flag, rule, user) # data could contain buckets that don't actually add up to 100000. Rather than returning an error in # this case (or changing the scaling, which would potentially change the results for *all* users), we # will simply put the user in the last bucket. 
- [ variations[-1][:variation], in_experiment ] + last_variation = variations[-1] + in_experiment = rollout[:kind] == "experiment" && !last_variation[:untracked] + + [last_variation[:variation], in_experiment] else # the rule isn't well-formed - [ nil, in_experiment ] + [nil, false] end end diff --git a/spec/impl/evaluator_bucketing_spec.rb b/spec/impl/evaluator_bucketing_spec.rb index 35775838..98dbd13d 100644 --- a/spec/impl/evaluator_bucketing_spec.rb +++ b/spec/impl/evaluator_bucketing_spec.rb @@ -6,7 +6,7 @@ describe "bucket_user" do describe "seed exists" do let(:seed) { 61 } - it "gets the expected bucket values for seed" do + it "returns the expected bucket values for seed" do user = { key: "userKeyA" } bucket = subject.bucket_user(user, "hashKey", "key", "saltyA", seed) expect(bucket).to be_within(0.0000001).of(0.09801207); @@ -20,14 +20,29 @@ expect(bucket).to be_within(0.0000001).of(0.9242641); end - it "should return the same bucket if the seed and user is the same" do + it "returns the same bucket regardless of hashKey and salt" do user = { key: "userKeyA" } - bucket1 = subject.bucket_user(user, "hashKey", "bucket_by", "saltyA", seed) - bucket2 = subject.bucket_user(user, "hashKey1", "bucket_by", "saltyB", seed) - bucket3 = subject.bucket_user(user, "hashKey2", "bucket_by", "saltyC", seed) + bucket1 = subject.bucket_user(user, "hashKey", "key", "saltyA", seed) + bucket2 = subject.bucket_user(user, "hashKey1", "key", "saltyB", seed) + bucket3 = subject.bucket_user(user, "hashKey2", "key", "saltyC", seed) expect(bucket1).to eq(bucket2) expect(bucket2).to eq(bucket3) end + + it "returns a different bucket if the seed is not the same" do + user = { key: "userKeyA" } + bucket1 = subject.bucket_user(user, "hashKey", "key", "saltyA", seed) + bucket2 = subject.bucket_user(user, "hashKey1", "key", "saltyB", seed+1) + expect(bucket1).to_not eq(bucket2) + end + + it "returns a different bucket if the user is not the same" do + user1 = { key: "userKeyA" } + user2 = { key: "userKeyB" } + bucket1 = subject.bucket_user(user1, "hashKey", "key", "saltyA", seed) + bucket2 = subject.bucket_user(user2, "hashKey1", "key", "saltyB", seed) + expect(bucket1).to_not eq(bucket2) + end end it "gets expected bucket values for specific keys" do @@ -84,54 +99,118 @@ end describe "variation_index_for_user" do - it "matches bucket" do - user = { key: "userkey" } + context "rollout is not an experiment" do + it "matches bucket" do + user = { key: "userkey" } + flag_key = "flagkey" + salt = "salt" + + # First verify that with our test inputs, the bucket value will be greater than zero and less than 100000, + # so we can construct a rollout whose second bucket just barely contains that value + bucket_value = (subject.bucket_user(user, flag_key, "key", salt, nil) * 100000).truncate() + expect(bucket_value).to be > 0 + expect(bucket_value).to be < 100000 + + bad_variation_a = 0 + matched_variation = 1 + bad_variation_b = 2 + rule = { + rollout: { + variations: [ + { variation: bad_variation_a, weight: bucket_value }, # end of bucket range is not inclusive, so it will *not* match the target value + { variation: matched_variation, weight: 1 }, # size of this bucket is 1, so it only matches that specific value + { variation: bad_variation_b, weight: 100000 - (bucket_value + 1) } + ] + } + } + flag = { key: flag_key, salt: salt } + + result_variation, inExperiment = subject.variation_index_for_user(flag, rule, user) + expect(result_variation).to be matched_variation + expect(inExperiment).to be(false) + end + + it 
"uses last bucket if bucket value is equal to total weight" do + user = { key: "userkey" } + flag_key = "flagkey" + salt = "salt" + + bucket_value = (subject.bucket_user(user, flag_key, "key", salt, nil) * 100000).truncate() + + # We'll construct a list of variations that stops right at the target bucket value + rule = { + rollout: { + variations: [ + { variation: 0, weight: bucket_value } + ] + } + } + flag = { key: flag_key, salt: salt } + + result_variation, inExperiment = subject.variation_index_for_user(flag, rule, user) + expect(result_variation).to be 0 + expect(inExperiment).to be(false) + end + end + end + + context "rollout is an experiment" do + it "returns whether user is in the experiment or not" do + user1 = { key: "userKeyA" } + user2 = { key: "userKeyB" } + user3 = { key: "userKeyC" } flag_key = "flagkey" salt = "salt" + seed = 61 - # First verify that with our test inputs, the bucket value will be greater than zero and less than 100000, - # so we can construct a rollout whose second bucket just barely contains that value - bucket_value = (subject.bucket_user(user, flag_key, "key", salt, nil) * 100000).truncate() - expect(bucket_value).to be > 0 - expect(bucket_value).to be < 100000 - - bad_variation_a = 0 - matched_variation = 1 - bad_variation_b = 2 + rule = { rollout: { + seed: seed, + kind: 'experiment', variations: [ - { variation: bad_variation_a, weight: bucket_value }, # end of bucket range is not inclusive, so it will *not* match the target value - { variation: matched_variation, weight: 1 }, # size of this bucket is 1, so it only matches that specific value - { variation: bad_variation_b, weight: 100000 - (bucket_value + 1) } + { variation: 0, weight: 10000, untracked: false }, + { variation: 2, weight: 20000, untracked: false }, + { variation: 0, weight: 70000 , untracked: true } ] } } flag = { key: flag_key, salt: salt } - result_variation, _ = subject.variation_index_for_user(flag, rule, user) - expect(result_variation).to be matched_variation + result_variation, inExperiment = subject.variation_index_for_user(flag, rule, user1) + expect(result_variation).to be(0) + expect(inExperiment).to be(true) + result_variation, inExperiment = subject.variation_index_for_user(flag, rule, user2) + expect(result_variation).to be(2) + expect(inExperiment).to be(true) + result_variation, inExperiment = subject.variation_index_for_user(flag, rule, user3) + expect(result_variation).to be(0) + expect(inExperiment).to be(false) end it "uses last bucket if bucket value is equal to total weight" do user = { key: "userkey" } flag_key = "flagkey" salt = "salt" + seed = 61 - bucket_value = (subject.bucket_user(user, flag_key, "key", salt, nil) * 100000).truncate() + bucket_value = (subject.bucket_user(user, flag_key, "key", salt, seed) * 100000).truncate() # We'll construct a list of variations that stops right at the target bucket value rule = { rollout: { + seed: seed, + kind: 'experiment', variations: [ - { variation: 0, weight: bucket_value } + { variation: 0, weight: bucket_value, untracked: false } ] } } flag = { key: flag_key, salt: salt } - result_variation, _ = subject.variation_index_for_user(flag, rule, user) + result_variation, inExperiment = subject.variation_index_for_user(flag, rule, user) expect(result_variation).to be 0 + expect(inExperiment).to be(true) end end end From 03dd676ffba5d8cee38f353d9b4ff5f1c774f449 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 14 Jul 2021 16:45:23 -0700 Subject: [PATCH 180/182] add log warning for missing user key (#153) * add log 
warnings for nil/empty user key * rm warning for empty string key * fix test --- lib/ldclient-rb/ldclient.rb | 6 ++++++ spec/ldclient_spec.rb | 14 +++----------- 2 files changed, 9 insertions(+), 11 deletions(-) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 37d80e9a..d96dd1f7 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -407,6 +407,12 @@ def evaluate_internal(key, user, default, event_factory) return detail end + if user[:key].nil? + @config.logger.warn { "[LDClient] Variation called with nil user key; returning default value" } + detail = Evaluator.error_result(EvaluationReason::ERROR_USER_NOT_SPECIFIED, default) + return detail + end + if !initialized? if @store.initialized? @config.logger.warn { "[LDClient] Client has not finished initializing; using last known values from feature store" } diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index cad4c03c..8e2ef650 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -171,20 +171,12 @@ def event_processor client.variation("key", user_anonymous, "default") end - it "queues a feature event for an existing feature when user key is nil" do + it "does not queue a feature event for an existing feature when user key is nil" do config.feature_store.init({ LaunchDarkly::FEATURES => {} }) config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) bad_user = { name: "Bob" } - expect(event_processor).to receive(:add_event).with(hash_including( - kind: "feature", - key: "key", - version: 100, - user: bad_user, - value: "default", - default: "default", - trackEvents: true, - debugEventsUntilDate: 1000 - )) + expect(event_processor).not_to receive(:add_event) + expect(logger).to receive(:warn) client.variation("key", bad_user, "default") end From 0de2cab7f244d105b80617581cd01127a146a113 Mon Sep 17 00:00:00 2001 From: Ben Woskow <48036130+bwoskow-ld@users.noreply.github.com> Date: Thu, 5 Aug 2021 17:39:31 -0700 Subject: [PATCH 181/182] diagnostic events should respect HTTPS_PROXY (#154) --- lib/ldclient-rb/impl/diagnostic_events.rb | 2 +- spec/diagnostic_events_spec.rb | 16 +++++++++------- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/lib/ldclient-rb/impl/diagnostic_events.rb b/lib/ldclient-rb/impl/diagnostic_events.rb index 4c61a905..13a55756 100644 --- a/lib/ldclient-rb/impl/diagnostic_events.rb +++ b/lib/ldclient-rb/impl/diagnostic_events.rb @@ -79,7 +79,7 @@ def self.make_config_data(config) streamingDisabled: !config.stream?, userKeysCapacity: config.user_keys_capacity, userKeysFlushIntervalMillis: self.seconds_to_millis(config.user_keys_flush_interval), - usingProxy: ENV.has_key?('http_proxy') || ENV.has_key?('https_proxy') || ENV.has_key?('HTTP_PROXY'), + usingProxy: ENV.has_key?('http_proxy') || ENV.has_key?('https_proxy') || ENV.has_key?('HTTP_PROXY') || ENV.has_key?('HTTPS_PROXY'), usingRelayDaemon: config.use_ldd?, } ret diff --git a/spec/diagnostic_events_spec.rb b/spec/diagnostic_events_spec.rb index 0c4ef058..cc55e8f1 100644 --- a/spec/diagnostic_events_spec.rb +++ b/spec/diagnostic_events_spec.rb @@ -79,13 +79,15 @@ def expected_default_config end end - it "detects proxy" do - begin - ENV["http_proxy"] = 'http://my-proxy' - event = default_acc.create_init_event(Config.new) - expect(event[:configuration][:usingProxy]).to be true - ensure - ENV["http_proxy"] = nil + ['http_proxy', 'https_proxy', 'HTTP_PROXY', 'HTTPS_PROXY'].each do |name| + it "detects proxy #{name}" do + begin + ENV["#{name}"] = 'http://my-proxy' + event = 
default_acc.create_init_event(Config.new) + expect(event[:configuration][:usingProxy]).to be true + ensure + ENV["#{name}"] = nil + end end end From 6bfb0301ab94817c3b94852fb68ab42563369070 Mon Sep 17 00:00:00 2001 From: Ben Woskow <48036130+bwoskow-ld@users.noreply.github.com> Date: Fri, 6 Aug 2021 09:51:36 -0700 Subject: [PATCH 182/182] minor test simplification (#155) --- spec/diagnostic_events_spec.rb | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/spec/diagnostic_events_spec.rb b/spec/diagnostic_events_spec.rb index cc55e8f1..7e1bce7f 100644 --- a/spec/diagnostic_events_spec.rb +++ b/spec/diagnostic_events_spec.rb @@ -82,11 +82,11 @@ def expected_default_config ['http_proxy', 'https_proxy', 'HTTP_PROXY', 'HTTPS_PROXY'].each do |name| it "detects proxy #{name}" do begin - ENV["#{name}"] = 'http://my-proxy' + ENV[name] = 'http://my-proxy' event = default_acc.create_init_event(Config.new) expect(event[:configuration][:usingProxy]).to be true ensure - ENV["#{name}"] = nil + ENV[name] = nil end end end
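
A minimal usage sketch for the APIs touched in patches 173 and 177-179 above (alias events, anonymous contextKind, and experiment rollout reasons), assuming an already-initialized LaunchDarkly::LDClient; the SDK key and flag key below are placeholders, not values taken from the patches.

require "ldclient-rb"

client = LaunchDarkly::LDClient.new("YOUR_SDK_KEY")           # placeholder SDK key

anonymous_user = { key: "session-abc123", anonymous: true }   # events for this user carry contextKind: "anonymousUser"
known_user     = { key: "user-key-123", name: "Sandy" }

# Evaluate a flag for the anonymous user and inspect the reason.
detail = client.variation_detail("my-flag-key", anonymous_user, false)
puts detail.value
puts detail.reason.in_experiment   # true only when an experiment rollout served a variation with untracked: false

# After the user authenticates, link the two identities with an alias event.
client.alias(known_user, anonymous_user)

client.close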